var/home/core/zuul-output/logs/kubelet.log
Dec 11 21:48:26 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 11 21:48:26 crc restorecon[4702]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 11 21:48:26 crc restorecon[4702]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:26 crc restorecon[4702]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 21:48:26 crc 
restorecon[4702]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 21:48:26 crc restorecon[4702]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 21:48:26 crc restorecon[4702]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 21:48:26 crc 
restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc 
restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc 
restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 21:48:26 
crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 
21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 21:48:26 crc restorecon[4702]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 21:48:26 crc restorecon[4702]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:26 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 11 21:48:27 crc restorecon[4702]:
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 
21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 21:48:27 crc 
restorecon[4702]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 
21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 21:48:27 crc restorecon[4702]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 21:48:27 crc restorecon[4702]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 21:48:27 crc restorecon[4702]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 11 21:48:27 crc kubenswrapper[4956]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 11 21:48:27 crc kubenswrapper[4956]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 11 21:48:27 crc kubenswrapper[4956]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 11 21:48:27 crc kubenswrapper[4956]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 11 21:48:27 crc kubenswrapper[4956]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. 
Dec 11 21:48:27 crc kubenswrapper[4956]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.853232 4956 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.856981 4956 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857000 4956 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857006 4956 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857011 4956 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857017 4956 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857022 4956 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857027 4956 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857032 4956 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857037 4956 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857043 4956 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857047 4956 feature_gate.go:330] unrecognized feature gate: OVNObservability 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857052 4956 feature_gate.go:330] unrecognized feature gate: ManagedBootImages 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857057 4956 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857061 4956 feature_gate.go:330] unrecognized feature gate: PinnedImages 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857067 4956 feature_gate.go:330] unrecognized feature gate: ExternalOIDC 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857072 4956 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857076 4956 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857088 4956 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857093 4956 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857098 4956 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857103 4956 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857108 4956 feature_gate.go:330] unrecognized feature gate: UpgradeStatus 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857113 4956 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857118 4956 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857123 4956 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857127 4956 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857132 4956 feature_gate.go:330] unrecognized feature gate: HardwareSpeed 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857137 4956 feature_gate.go:330] unrecognized feature gate: PlatformOperators 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857141 4956 feature_gate.go:330] unrecognized feature gate: Example 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857146 4956 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857151 4956 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857156 4956 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857160 4956 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857165 4956 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857171 4956 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857184 4956 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857191 4956 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857196 4956 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857201 4956 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857207 4956 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857213 4956 feature_gate.go:330] unrecognized feature gate: InsightsConfig 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857218 4956 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857223 4956 feature_gate.go:330] unrecognized feature gate: SignatureStores 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857227 4956 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857232 4956 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857237 4956 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857242 4956 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857247 4956 feature_gate.go:330] unrecognized feature gate: DNSNameResolver 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857252 4956 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857257 4956 feature_gate.go:330] unrecognized feature gate: GatewayAPI 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857262 4956 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857267 4956 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857274 4956 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857280 4956 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857285 4956 feature_gate.go:330] unrecognized feature gate: NewOLM 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857290 4956 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857295 4956 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857299 4956 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857304 4956 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857309 4956 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857313 4956 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857318 4956 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857323 4956 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857328 4956 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857332 4956 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857338 4956 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857344 4956 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857349 4956 feature_gate.go:330] unrecognized feature gate: OnClusterBuild 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857354 4956 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857360 4956 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.857368 4956 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857624 4956 flags.go:64] FLAG: --address="0.0.0.0" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857638 4956 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857649 4956 flags.go:64] FLAG: --anonymous-auth="true" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857656 4956 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857663 4956 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857668 4956 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857677 4956 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857684 4956 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857691 4956 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857697 4956 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857704 4956 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857710 4956 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857715 4956 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857721 4956 flags.go:64] FLAG: --cgroup-root="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857726 4956 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857732 4956 flags.go:64] FLAG: --client-ca-file="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857738 4956 flags.go:64] FLAG: --cloud-config="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857744 4956 flags.go:64] FLAG: --cloud-provider="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857749 4956 flags.go:64] FLAG: --cluster-dns="[]" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857756 4956 flags.go:64] FLAG: --cluster-domain="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857761 4956 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857796 4956 flags.go:64] FLAG: --config-dir="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857803 4956 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857809 4956 flags.go:64] FLAG: --container-log-max-files="5" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857816 4956 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857822 4956 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857828 4956 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857834 4956 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 11 21:48:27 crc 
kubenswrapper[4956]: I1211 21:48:27.857839 4956 flags.go:64] FLAG: --contention-profiling="false" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857845 4956 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857850 4956 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857856 4956 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857864 4956 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857871 4956 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857876 4956 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857882 4956 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857887 4956 flags.go:64] FLAG: --enable-load-reader="false" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857893 4956 flags.go:64] FLAG: --enable-server="true" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857899 4956 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857907 4956 flags.go:64] FLAG: --event-burst="100" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857913 4956 flags.go:64] FLAG: --event-qps="50" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857919 4956 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857925 4956 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857930 4956 flags.go:64] FLAG: --eviction-hard="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857937 4956 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857942 4956 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857948 4956 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857954 4956 flags.go:64] FLAG: --eviction-soft="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857959 4956 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857965 4956 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857970 4956 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857976 4956 flags.go:64] FLAG: --experimental-mounter-path="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857981 4956 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857987 4956 flags.go:64] FLAG: --fail-swap-on="true" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857993 4956 flags.go:64] FLAG: --feature-gates="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.857999 4956 flags.go:64] FLAG: --file-check-frequency="20s" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858005 4956 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858011 4956 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 11 21:48:27 crc 
kubenswrapper[4956]: I1211 21:48:27.858017 4956 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858023 4956 flags.go:64] FLAG: --healthz-port="10248" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858029 4956 flags.go:64] FLAG: --help="false" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858034 4956 flags.go:64] FLAG: --hostname-override="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858039 4956 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858045 4956 flags.go:64] FLAG: --http-check-frequency="20s" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858051 4956 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858056 4956 flags.go:64] FLAG: --image-credential-provider-config="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858061 4956 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858067 4956 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858077 4956 flags.go:64] FLAG: --image-service-endpoint="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858083 4956 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858089 4956 flags.go:64] FLAG: --kube-api-burst="100" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858095 4956 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858102 4956 flags.go:64] FLAG: --kube-api-qps="50" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858108 4956 flags.go:64] FLAG: --kube-reserved="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858114 4956 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858119 4956 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858125 4956 flags.go:64] FLAG: --kubelet-cgroups="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858131 4956 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858136 4956 flags.go:64] FLAG: --lock-file="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858142 4956 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858147 4956 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858153 4956 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858161 4956 flags.go:64] FLAG: --log-json-split-stream="false" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858167 4956 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858172 4956 flags.go:64] FLAG: --log-text-split-stream="false" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858178 4956 flags.go:64] FLAG: --logging-format="text" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858184 4956 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858190 4956 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 11 21:48:27 crc 
kubenswrapper[4956]: I1211 21:48:27.858200 4956 flags.go:64] FLAG: --manifest-url="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858206 4956 flags.go:64] FLAG: --manifest-url-header="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858213 4956 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858219 4956 flags.go:64] FLAG: --max-open-files="1000000" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858226 4956 flags.go:64] FLAG: --max-pods="110" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858231 4956 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858237 4956 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858243 4956 flags.go:64] FLAG: --memory-manager-policy="None" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858248 4956 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858254 4956 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858259 4956 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858265 4956 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858278 4956 flags.go:64] FLAG: --node-status-max-images="50" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858283 4956 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858289 4956 flags.go:64] FLAG: --oom-score-adj="-999" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858295 4956 flags.go:64] FLAG: --pod-cidr="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858302 4956 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858310 4956 flags.go:64] FLAG: --pod-manifest-path="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858316 4956 flags.go:64] FLAG: --pod-max-pids="-1" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858322 4956 flags.go:64] FLAG: --pods-per-core="0" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858327 4956 flags.go:64] FLAG: --port="10250" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858337 4956 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858343 4956 flags.go:64] FLAG: --provider-id="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858349 4956 flags.go:64] FLAG: --qos-reserved="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858355 4956 flags.go:64] FLAG: --read-only-port="10255" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858361 4956 flags.go:64] FLAG: --register-node="true" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858366 4956 flags.go:64] FLAG: --register-schedulable="true" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858372 4956 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858381 4956 flags.go:64] FLAG: --registry-burst="10" Dec 11 21:48:27 crc 
kubenswrapper[4956]: I1211 21:48:27.858386 4956 flags.go:64] FLAG: --registry-qps="5" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858392 4956 flags.go:64] FLAG: --reserved-cpus="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858397 4956 flags.go:64] FLAG: --reserved-memory="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858404 4956 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858412 4956 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858418 4956 flags.go:64] FLAG: --rotate-certificates="false" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858424 4956 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858429 4956 flags.go:64] FLAG: --runonce="false" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858435 4956 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858441 4956 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858446 4956 flags.go:64] FLAG: --seccomp-default="false" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858452 4956 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858457 4956 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858463 4956 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858469 4956 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858475 4956 flags.go:64] FLAG: --storage-driver-password="root" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858480 4956 flags.go:64] FLAG: --storage-driver-secure="false" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858486 4956 flags.go:64] FLAG: --storage-driver-table="stats" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858491 4956 flags.go:64] FLAG: --storage-driver-user="root" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858497 4956 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858502 4956 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858509 4956 flags.go:64] FLAG: --system-cgroups="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858516 4956 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858525 4956 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858531 4956 flags.go:64] FLAG: --tls-cert-file="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858536 4956 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858543 4956 flags.go:64] FLAG: --tls-min-version="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858548 4956 flags.go:64] FLAG: --tls-private-key-file="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858554 4956 flags.go:64] FLAG: --topology-manager-policy="none" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858560 4956 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 11 21:48:27 crc kubenswrapper[4956]: 
I1211 21:48:27.858565 4956 flags.go:64] FLAG: --topology-manager-scope="container" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858571 4956 flags.go:64] FLAG: --v="2" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858578 4956 flags.go:64] FLAG: --version="false" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858586 4956 flags.go:64] FLAG: --vmodule="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858593 4956 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.858601 4956 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858736 4956 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858744 4956 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858749 4956 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858755 4956 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858760 4956 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858766 4956 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858789 4956 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858794 4956 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858799 4956 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858804 4956 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858811 4956 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
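[editor's note] The flood of feature_gate.go:330 warnings above is the kubelet skipping gate names it does not know: the cluster-wide feature set is handed to every component, gates owned by other components (GatewayAPI, NewOLM, and so on) are warned about and ignored, and only gates in the kubelet's own registry are applied. A minimal Go sketch of that pattern, with a toy registry standing in for the real k8s.io/component-base/featuregate implementation:

package main

import "fmt"

// known is an illustrative registry of gates this binary understands,
// with their defaults; names are taken from the effective map the log
// prints at feature_gate.go:386.
var known = map[string]bool{
	"CloudDualStackNodeIPs":                  false,
	"DisableKubeletCloudCredentialProviders": false,
	"KMSv1":                                  false,
}

func apply(requested map[string]bool) map[string]bool {
	effective := make(map[string]bool, len(known))
	for name, def := range known {
		effective[name] = def
	}
	for name, val := range requested {
		if _, ok := known[name]; !ok {
			// Unknown names only produce the W "unrecognized" line.
			fmt.Println("W unrecognized feature gate:", name)
			continue
		}
		effective[name] = val
	}
	return effective
}

func main() {
	fmt.Println(apply(map[string]bool{
		"CloudDualStackNodeIPs": true, // recognized: applied
		"GatewayAPI":            true, // unrecognized here: warned, ignored
	}))
}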
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858816 4956 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858821 4956 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858826 4956 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858832 4956 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858837 4956 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858842 4956 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858847 4956 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858852 4956 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858857 4956 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858862 4956 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858867 4956 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858871 4956 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858877 4956 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858882 4956 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858887 4956 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858891 4956 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858897 4956 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858903 4956 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858909 4956 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858915 4956 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858924 4956 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858933 4956 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858939 4956 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858945 4956 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858951 4956 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858957 4956 
feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858962 4956 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858968 4956 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858973 4956 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858979 4956 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858985 4956 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858990 4956 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.858996 4956 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859002 4956 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859009 4956 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859015 4956 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859021 4956 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859026 4956 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859031 4956 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859036 4956 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859041 4956 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859046 4956 feature_gate.go:330] unrecognized feature gate: Example Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859050 4956 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859058 4956 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859063 4956 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859068 4956 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859073 4956 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859077 4956 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859083 4956 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859088 4956 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859093 4956 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 11 21:48:27 crc 
kubenswrapper[4956]: W1211 21:48:27.859098 4956 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859105 4956 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859114 4956 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859119 4956 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859124 4956 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859129 4956 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859134 4956 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859140 4956 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.859146 4956 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.859167 4956 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.867960 4956 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.867987 4956 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.868191 4956 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.868225 4956 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.868230 4956 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869006 4956 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869053 4956 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869064 4956 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
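[editor's note] The map printed at feature_gate.go:386 above is the effective gate set, and per the deprecation notices at the top of this log it belongs in the file named by --config=/etc/kubernetes/kubelet.conf rather than on the command line. A minimal sketch of parsing such a config subset, using an illustrative struct (not the real k8s.io/kubelet/config/v1beta1 types) and the systemReserved values this kubelet logs (cpu=200m, memory=350Mi, ephemeral-storage=350Mi):

package main

import (
	"encoding/json"
	"fmt"
)

// kubeletConfigSubset is a stand-in for the KubeletConfiguration fields
// the deprecation notices point at; the real schema is larger.
type kubeletConfigSubset struct {
	FeatureGates   map[string]bool   `json:"featureGates"`
	SystemReserved map[string]string `json:"systemReserved"`
}

func main() {
	// Values taken from this kubelet's own log: the --system-reserved
	// FLAG line and the effective gate map at feature_gate.go:386.
	raw := []byte(`{
	  "featureGates": {"CloudDualStackNodeIPs": true, "KMSv1": true},
	  "systemReserved": {"cpu": "200m", "memory": "350Mi", "ephemeral-storage": "350Mi"}
	}`)
	var cfg kubeletConfigSubset
	if err := json.Unmarshal(raw, &cfg); err != nil {
		panic(err)
	}
	fmt.Printf("featureGates: %v\nsystemReserved: %v\n", cfg.FeatureGates, cfg.SystemReserved)
}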
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869073 4956 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869080 4956 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869088 4956 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869095 4956 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869104 4956 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869111 4956 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869118 4956 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869124 4956 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869130 4956 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869142 4956 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869147 4956 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869153 4956 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869158 4956 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869163 4956 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869200 4956 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869211 4956 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869216 4956 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869221 4956 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869225 4956 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869230 4956 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869236 4956 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869241 4956 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869250 4956 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869255 4956 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869260 4956 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869265 4956 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 11 21:48:27 crc 
kubenswrapper[4956]: W1211 21:48:27.869270 4956 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869275 4956 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869280 4956 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869285 4956 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869299 4956 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869309 4956 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869321 4956 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869330 4956 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869340 4956 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869345 4956 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869350 4956 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869355 4956 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869360 4956 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869365 4956 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869370 4956 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869383 4956 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869388 4956 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869393 4956 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869398 4956 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869403 4956 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869408 4956 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869417 4956 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869422 4956 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869427 4956 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869667 4956 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 11 
21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869696 4956 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869702 4956 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869824 4956 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869832 4956 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869840 4956 feature_gate.go:330] unrecognized feature gate: Example Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869844 4956 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869848 4956 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869852 4956 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869855 4956 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869859 4956 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869863 4956 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869866 4956 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869870 4956 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.869873 4956 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.869880 4956 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870010 4956 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870015 4956 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870018 4956 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870022 4956 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870026 4956 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870029 4956 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870033 4956 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870037 4956 
feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870040 4956 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870046 4956 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870049 4956 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870053 4956 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870057 4956 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870061 4956 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870066 4956 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870071 4956 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870075 4956 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870079 4956 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870083 4956 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870087 4956 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870090 4956 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870094 4956 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870098 4956 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870102 4956 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870105 4956 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870110 4956 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870114 4956 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870117 4956 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870122 4956 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870126 4956 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870130 4956 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870134 4956 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870138 4956 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870142 4956 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870146 4956 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870150 4956 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870154 4956 feature_gate.go:330] unrecognized feature gate: Example Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870158 4956 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870161 4956 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870165 4956 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870169 4956 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870172 4956 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870176 4956 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870179 4956 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870183 4956 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870186 4956 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870190 4956 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870193 4956 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870197 4956 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870200 4956 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870204 4956 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870207 4956 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870210 4956 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870214 4956 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870218 4956 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 11 21:48:27 crc 
kubenswrapper[4956]: W1211 21:48:27.870221 4956 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870225 4956 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870228 4956 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870232 4956 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870235 4956 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870239 4956 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870243 4956 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870248 4956 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870251 4956 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870255 4956 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870259 4956 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870263 4956 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870267 4956 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870270 4956 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870274 4956 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.870277 4956 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.870283 4956 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.870612 4956 server.go:940] "Client rotation is on, will bootstrap in background" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.872894 4956 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.872962 4956 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
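[editor's note] The records that follow show client certificate rotation starting for /var/lib/kubelet/pki/kubelet-client-current.pem, with certificate_manager.go reporting the expiry and rotation deadline (and a CSR POST that fails because the API server at api-int.crc.testing:6443 is not up yet). A standalone sketch, not kubelet code, for reading that expiry with Go's standard library:

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
)

func main() {
	// Path comes straight from the certificate_store.go line above.
	data, err := os.ReadFile("/var/lib/kubelet/pki/kubelet-client-current.pem")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	for block, rest := pem.Decode(data); block != nil; block, rest = pem.Decode(rest) {
		if block.Type != "CERTIFICATE" {
			continue // the same file also carries the private key
		}
		cert, err := x509.ParseCertificate(block.Bytes)
		if err != nil {
			fmt.Fprintln(os.Stderr, err)
			os.Exit(1)
		}
		// Compare with "Certificate expiration is 2026-02-24 05:52:08 +0000 UTC"
		// reported by certificate_manager.go.
		fmt.Println("NotAfter:", cert.NotAfter)
	}
}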
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.873442 4956 server.go:997] "Starting client certificate rotation"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.873457 4956 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.873886 4956 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-21 17:54:44.396418736 +0000 UTC
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.873995 4956 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.878121 4956 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.880149 4956 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 11 21:48:27 crc kubenswrapper[4956]: E1211 21:48:27.880239 4956 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.83:6443: connect: connection refused" logger="UnhandledError"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.894651 4956 log.go:25] "Validated CRI v1 runtime API"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.917079 4956 log.go:25] "Validated CRI v1 image API"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.919403 4956 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.921870 4956 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-11-21-43-37-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.921905 4956 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.948068 4956 manager.go:217] Machine: {Timestamp:2025-12-11 21:48:27.94498215 +0000 UTC m=+0.389360340 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:38ee1c6a-2793-48ed-96fb-b9b725b90f32 BootID:b13d9006-a946-432b-9df3-08f296d9a158 Filesystems:[{Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:1e:46:72 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:1e:46:72 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:a5:b1:bf Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:c9:b1:14 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:18:01:31 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:9c:73:ae Speed:-1 Mtu:1496} {Name:eth10 MacAddress:c6:c6:7d:63:ca:04 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:a6:28:98:ad:00:72 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.948363 4956 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.948562 4956 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.949141 4956 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.949375 4956 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.949413 4956 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.949628 4956 topology_manager.go:138] "Creating topology manager with none policy"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.949642 4956 container_manager_linux.go:303] "Creating device plugin manager"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.949804 4956 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.949857 4956 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.951509 4956 state_mem.go:36] "Initialized new in-memory state store"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.951612 4956 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.952361 4956 kubelet.go:418] "Attempting to sync node with API server"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.952384 4956 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.952402 4956 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.952418 4956 kubelet.go:324] "Adding apiserver pod source"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.952431 4956 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.954647 4956 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.955395 4956 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.956753 4956 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.83:6443: connect: connection refused
Dec 11 21:48:27 crc kubenswrapper[4956]: E1211 21:48:27.956848 4956 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.83:6443: connect: connection refused" logger="UnhandledError"
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.956896 4956 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.83:6443: connect: connection refused
Dec 11 21:48:27 crc kubenswrapper[4956]: E1211 21:48:27.956997 4956 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.83:6443: connect: connection refused" logger="UnhandledError"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.957096 4956 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.957908 4956 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.957950 4956 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.957967 4956 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.957980 4956 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.958002 4956 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.958015 4956 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.958029 4956 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.958060 4956 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.958078 4956 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.958091 4956 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.958113 4956 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.958125 4956 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.958675 4956 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.959432 4956 server.go:1280] "Started kubelet"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.960166 4956 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.960217 4956 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.960635 4956 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.83:6443: connect: connection refused
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.961106 4956 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 11 21:48:27 crc systemd[1]: Started Kubernetes Kubelet.
Dec 11 21:48:27 crc kubenswrapper[4956]: E1211 21:48:27.961753 4956 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.83:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188047891186eb25 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-11 21:48:27.959380773 +0000 UTC m=+0.403758983,LastTimestamp:2025-12-11 21:48:27.959380773 +0000 UTC m=+0.403758983,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.962502 4956 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.962538 4956 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.962708 4956 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 06:27:46.574163927 +0000 UTC
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.962735 4956 server.go:460] "Adding debug handlers to kubelet server"
Dec 11 21:48:27 crc kubenswrapper[4956]: E1211 21:48:27.962831 4956 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.963062 4956 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.963093 4956 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.963225 4956 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 11 21:48:27 crc kubenswrapper[4956]: E1211 21:48:27.964256 4956 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.83:6443: connect: connection refused" interval="200ms"
Dec 11 21:48:27 crc kubenswrapper[4956]: W1211 21:48:27.964587 4956 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.83:6443: connect: connection refused
Dec 11 21:48:27 crc kubenswrapper[4956]: E1211 21:48:27.964670 4956 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.83:6443: connect: connection refused" logger="UnhandledError"
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.964701 4956 factory.go:55] Registering systemd factory
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.964729 4956 factory.go:221] Registration of the systemd container factory successfully
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.965919 4956 factory.go:153] Registering CRI-O factory
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.965953 4956 factory.go:221] Registration of the crio container factory successfully
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.966231 4956 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.966265 4956 factory.go:103] Registering Raw factory
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.966285 4956 manager.go:1196] Started watching for new ooms in manager
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.967191 4956 manager.go:319] Starting recovery of all containers
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983270 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983315 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983325 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983334 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983343 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983353 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983363 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983372 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983383 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983392 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983401 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983411 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983420 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983431 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983442 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983453 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983463 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983472 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983481 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983490 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983499 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983508 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983518 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983536 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983546 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983582 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983594 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983628 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983639 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983649 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983657 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983668 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983678 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983716 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983728 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983737 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983745 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983754 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983780 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983794 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983804 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983814 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983824 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983834 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983843 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983852 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983862 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983876 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983890 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983903 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983916 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983930 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983947 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983962 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983975 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.983988 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984000 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984014 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984025 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984037 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984048 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984063 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984074 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984089 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984101 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984113 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984125 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984137 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984149 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984162 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984174 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984187 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984198 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984210 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984222 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984232 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984242 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984253 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984264 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984286 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984297 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984309 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984322 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984333 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984344 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984358 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984370 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984381 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984395 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984407 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984418 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984429 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984441 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984453 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984464 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984477 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984490 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984501 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984513 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984526 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984538 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984548 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984559 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984570 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984586 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984599 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984611 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984624 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984636 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984649 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984661 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984675 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984688 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984703 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984718 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984730 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984743 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984755 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984788 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984801 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984815 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984829 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984840 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984852 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984864 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984876 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984887 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984900 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984912 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984924 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984935 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984947 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984958 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984971 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984983 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.984995 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.985008 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.985020 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.985032 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.985045 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.985057 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803"
volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.985070 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.985083 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.985097 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.985108 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.985121 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.985848 4956 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.985886 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.985899 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.985911 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.985925 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.985937 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.985951 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.985964 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.985976 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.985988 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986002 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986027 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986045 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986065 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986084 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986104 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986121 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986134 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986146 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986168 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986180 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986221 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986235 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986254 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986266 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986279 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986291 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986303 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986314 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986327 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986339 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986351 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986362 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986375 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986387 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986399 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986411 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986422 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986437 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986449 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986462 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986473 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986484 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986497 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986509 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986523 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986535 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986548 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986559 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986572 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986585 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986599 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986613 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986626 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986639 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986652 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986666 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986678 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986691 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986705 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986718 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986732 4956 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986745 4956 reconstruct.go:97] "Volume reconstruction finished" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.986753 4956 reconciler.go:26] "Reconciler: start to sync state" Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.989927 4956 manager.go:324] Recovery completed Dec 11 21:48:27 crc kubenswrapper[4956]: I1211 21:48:27.999398 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.001643 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.001683 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.001693 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.003939 4956 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.003973 4956 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.004030 4956 state_mem.go:36] "Initialized new in-memory state store" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.013949 4956 policy_none.go:49] "None policy: Start" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.014634 4956 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.014661 4956 state_mem.go:35] "Initializing new in-memory state store" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.017882 4956 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.019838 4956 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.019909 4956 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.019954 4956 kubelet.go:2335] "Starting kubelet main sync loop" Dec 11 21:48:28 crc kubenswrapper[4956]: E1211 21:48:28.020019 4956 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 11 21:48:28 crc kubenswrapper[4956]: W1211 21:48:28.020939 4956 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.83:6443: connect: connection refused Dec 11 21:48:28 crc kubenswrapper[4956]: E1211 21:48:28.020988 4956 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.83:6443: connect: connection refused" logger="UnhandledError" Dec 11 21:48:28 crc kubenswrapper[4956]: E1211 21:48:28.063586 4956 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.073989 4956 manager.go:334] "Starting Device Plugin manager" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.074042 4956 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.074199 4956 server.go:79] "Starting device plugin registration server" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.074587 4956 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.074602 4956 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.074805 4956 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.074924 4956 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.074933 4956 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 11 21:48:28 crc kubenswrapper[4956]: E1211 21:48:28.083039 4956 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.120694 4956 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc"] Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.120871 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.121950 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.122003 4956 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.122017 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.122176 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.122418 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.122463 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.122847 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.122881 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.122893 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.122977 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.123069 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.123117 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.123314 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.123349 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.123361 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.123911 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.123943 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.123955 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.124039 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.124136 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.124175 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.124376 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.124396 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.124409 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.124788 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.124821 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.124835 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.124929 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.124984 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.125003 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.125014 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.125032 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.125071 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.125827 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.125858 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.125872 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.125917 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.125934 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.125944 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.126082 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.126117 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.126798 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.126839 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.126857 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:28 crc kubenswrapper[4956]: E1211 21:48:28.164964 4956 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.83:6443: connect: connection refused" interval="400ms" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.175140 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.176755 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.176993 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.177050 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.177086 4956 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 21:48:28 crc kubenswrapper[4956]: E1211 21:48:28.177830 4956 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.83:6443: connect: connection refused" node="crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.188716 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.188839 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.188894 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.188946 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: 
\"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.189196 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.189269 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.189339 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.189417 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.189505 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.189574 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.189619 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.189652 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.189687 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.189719 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.189750 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.290966 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291031 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291066 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291096 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291127 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291156 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291185 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291215 4956 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291227 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291265 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291294 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291244 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291396 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291396 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291397 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291459 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291414 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 
11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291441 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291401 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291536 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291564 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291593 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291676 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291691 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291712 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291805 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291843 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: 
\"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291927 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.291929 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.292029 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.378409 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.379804 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.379888 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.379902 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.379938 4956 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 21:48:28 crc kubenswrapper[4956]: E1211 21:48:28.380573 4956 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.83:6443: connect: connection refused" node="crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.466417 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.481051 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: W1211 21:48:28.490387 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-1427ddf45a18381b04bd92b6a713c9b596ead8d7d2b9a84cbd249a8c96d63ab1 WatchSource:0}: Error finding container 1427ddf45a18381b04bd92b6a713c9b596ead8d7d2b9a84cbd249a8c96d63ab1: Status 404 returned error can't find the container with id 1427ddf45a18381b04bd92b6a713c9b596ead8d7d2b9a84cbd249a8c96d63ab1 Dec 11 21:48:28 crc kubenswrapper[4956]: W1211 21:48:28.498134 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-e23b81b09632204c4f11d90ffbfb0953ae290a2f4cca9f4e2a971d58e3fe7f63 WatchSource:0}: Error finding container e23b81b09632204c4f11d90ffbfb0953ae290a2f4cca9f4e2a971d58e3fe7f63: Status 404 returned error can't find the container with id e23b81b09632204c4f11d90ffbfb0953ae290a2f4cca9f4e2a971d58e3fe7f63 Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.505367 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.520558 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: W1211 21:48:28.524117 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-196286b6fd405cb879b167a063c2ae2e1ada38777899be7d9ef7302cbea93077 WatchSource:0}: Error finding container 196286b6fd405cb879b167a063c2ae2e1ada38777899be7d9ef7302cbea93077: Status 404 returned error can't find the container with id 196286b6fd405cb879b167a063c2ae2e1ada38777899be7d9ef7302cbea93077 Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.526647 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 21:48:28 crc kubenswrapper[4956]: W1211 21:48:28.537099 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-0b342dc6d27ce6e978aed6bb2e45696666a9f7f7df72b80da6fc3d751e27f1a0 WatchSource:0}: Error finding container 0b342dc6d27ce6e978aed6bb2e45696666a9f7f7df72b80da6fc3d751e27f1a0: Status 404 returned error can't find the container with id 0b342dc6d27ce6e978aed6bb2e45696666a9f7f7df72b80da6fc3d751e27f1a0 Dec 11 21:48:28 crc kubenswrapper[4956]: W1211 21:48:28.543488 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-a6545a1e1239f555698a986cb75ac2fa1475e3a9463033d2dbe6a198967ed2d2 WatchSource:0}: Error finding container a6545a1e1239f555698a986cb75ac2fa1475e3a9463033d2dbe6a198967ed2d2: Status 404 returned error can't find the container with id a6545a1e1239f555698a986cb75ac2fa1475e3a9463033d2dbe6a198967ed2d2 Dec 11 21:48:28 crc kubenswrapper[4956]: E1211 21:48:28.566526 4956 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.83:6443: connect: connection refused" interval="800ms" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.781511 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.782792 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.782837 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.782846 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.782871 4956 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 21:48:28 crc kubenswrapper[4956]: E1211 21:48:28.783271 4956 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.83:6443: connect: connection refused" node="crc" Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.962181 4956 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.83:6443: connect: connection refused Dec 11 21:48:28 crc kubenswrapper[4956]: I1211 21:48:28.963222 4956 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 10:49:14.593649137 +0000 UTC Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.029656 4956 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519" exitCode=0 Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.029747 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519"} Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.029875 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1427ddf45a18381b04bd92b6a713c9b596ead8d7d2b9a84cbd249a8c96d63ab1"} Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.029973 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.031238 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.031273 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.031292 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.031726 4956 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="78f0f7117e7d88af95b276a49af38ab2400b99d33b9954f9fc50f9fdd5699954" exitCode=0 Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.031790 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"78f0f7117e7d88af95b276a49af38ab2400b99d33b9954f9fc50f9fdd5699954"} Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.031812 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"a6545a1e1239f555698a986cb75ac2fa1475e3a9463033d2dbe6a198967ed2d2"} Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.031863 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.032754 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.032824 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.032837 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.032920 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.033265 4956 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="dddf04a05f555a668de6a259bec6951c9727484c0273dc72de612db46d790f6b" exitCode=0 Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.033350 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"dddf04a05f555a668de6a259bec6951c9727484c0273dc72de612db46d790f6b"} Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.033415 4956 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"0b342dc6d27ce6e978aed6bb2e45696666a9f7f7df72b80da6fc3d751e27f1a0"} Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.033548 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.033558 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.033575 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.033584 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.034845 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.034878 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.034890 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.036740 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343"} Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.036842 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"196286b6fd405cb879b167a063c2ae2e1ada38777899be7d9ef7302cbea93077"} Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.039922 4956 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="fc7ec9d6a61f8cb94e02f5d30014e6525134ccd67d8d9e93418e8d84316c8433" exitCode=0 Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.039961 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"fc7ec9d6a61f8cb94e02f5d30014e6525134ccd67d8d9e93418e8d84316c8433"} Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.039986 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"e23b81b09632204c4f11d90ffbfb0953ae290a2f4cca9f4e2a971d58e3fe7f63"} Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.040085 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.040932 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.040963 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.040975 4956 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:29 crc kubenswrapper[4956]: W1211 21:48:29.083087 4956 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.83:6443: connect: connection refused Dec 11 21:48:29 crc kubenswrapper[4956]: E1211 21:48:29.083176 4956 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.83:6443: connect: connection refused" logger="UnhandledError" Dec 11 21:48:29 crc kubenswrapper[4956]: W1211 21:48:29.257486 4956 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.83:6443: connect: connection refused Dec 11 21:48:29 crc kubenswrapper[4956]: E1211 21:48:29.257562 4956 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.83:6443: connect: connection refused" logger="UnhandledError" Dec 11 21:48:29 crc kubenswrapper[4956]: W1211 21:48:29.323944 4956 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.83:6443: connect: connection refused Dec 11 21:48:29 crc kubenswrapper[4956]: E1211 21:48:29.324044 4956 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.83:6443: connect: connection refused" logger="UnhandledError" Dec 11 21:48:29 crc kubenswrapper[4956]: W1211 21:48:29.331154 4956 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.83:6443: connect: connection refused Dec 11 21:48:29 crc kubenswrapper[4956]: E1211 21:48:29.331513 4956 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.83:6443: connect: connection refused" logger="UnhandledError" Dec 11 21:48:29 crc kubenswrapper[4956]: E1211 21:48:29.367615 4956 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.83:6443: connect: connection refused" interval="1.6s" Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.583576 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:29 crc kubenswrapper[4956]: 
I1211 21:48:29.585013 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.585051 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.585067 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.585097 4956 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 21:48:29 crc kubenswrapper[4956]: E1211 21:48:29.585613 4956 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.83:6443: connect: connection refused" node="crc" Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.961992 4956 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.83:6443: connect: connection refused Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.964135 4956 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 19:12:57.632313915 +0000 UTC Dec 11 21:48:29 crc kubenswrapper[4956]: I1211 21:48:29.964194 4956 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 333h24m27.668125021s for next certificate rotation Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.047693 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e"} Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.047757 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65"} Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.047783 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6"} Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.047899 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.048988 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.049021 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.049031 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.050904 4956 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="433cb712b84499fb96324cdca7d9f3495c51176825c3d62f13ae59dfbd7926e6" exitCode=0 Dec 11 
21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.050954 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"433cb712b84499fb96324cdca7d9f3495c51176825c3d62f13ae59dfbd7926e6"} Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.051040 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.051738 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.051763 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.051819 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.056677 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df"} Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.056721 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40"} Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.056742 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd"} Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.056785 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59"} Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.058011 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"f544cad46d1313a6b2238591bbae146825c7360a076c95bce0ab7eb3bdafa848"} Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.058135 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.061298 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.061341 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.061353 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.065146 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"5a2d4a936cd20db53f471e17e8b237fc279073bb4a2db18e816bad71651a9c21"} Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.065203 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"aa38c7d7e09aff0be77f317dc020e473fc83549e1d9d71e89ea3a95773ed6a17"} Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.065220 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"4c229454d9ab5fd483f45da51be9f8bfcf25e3b5989fd180f77141c1b706a9d9"} Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.065339 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.066201 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.066243 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.066255 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.100075 4956 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 11 21:48:30 crc kubenswrapper[4956]: E1211 21:48:30.101678 4956 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.83:6443: connect: connection refused" logger="UnhandledError" Dec 11 21:48:30 crc kubenswrapper[4956]: E1211 21:48:30.187917 4956 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.83:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188047891186eb25 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-11 21:48:27.959380773 +0000 UTC m=+0.403758983,LastTimestamp:2025-12-11 21:48:27.959380773 +0000 UTC m=+0.403758983,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.778322 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 21:48:30 crc kubenswrapper[4956]: I1211 21:48:30.961709 4956 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.83:6443: connect: connection refused Dec 11 21:48:30 crc kubenswrapper[4956]: E1211 21:48:30.968418 4956 controller.go:145] 
"Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.83:6443: connect: connection refused" interval="3.2s" Dec 11 21:48:31 crc kubenswrapper[4956]: I1211 21:48:31.104371 4956 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="0d38fa29d6b995a224cb6f1c3f5729d110e171732ddbe51bdd5e62a09ff3821c" exitCode=0 Dec 11 21:48:31 crc kubenswrapper[4956]: I1211 21:48:31.104444 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"0d38fa29d6b995a224cb6f1c3f5729d110e171732ddbe51bdd5e62a09ff3821c"} Dec 11 21:48:31 crc kubenswrapper[4956]: I1211 21:48:31.104625 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:31 crc kubenswrapper[4956]: I1211 21:48:31.105365 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:31 crc kubenswrapper[4956]: I1211 21:48:31.105410 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:31 crc kubenswrapper[4956]: I1211 21:48:31.105428 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:31 crc kubenswrapper[4956]: I1211 21:48:31.107631 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:31 crc kubenswrapper[4956]: I1211 21:48:31.107624 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41"} Dec 11 21:48:31 crc kubenswrapper[4956]: I1211 21:48:31.107644 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:31 crc kubenswrapper[4956]: I1211 21:48:31.116126 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:31 crc kubenswrapper[4956]: I1211 21:48:31.116169 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:31 crc kubenswrapper[4956]: I1211 21:48:31.116179 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:31 crc kubenswrapper[4956]: I1211 21:48:31.116169 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:31 crc kubenswrapper[4956]: I1211 21:48:31.116283 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:31 crc kubenswrapper[4956]: I1211 21:48:31.116295 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:31 crc kubenswrapper[4956]: I1211 21:48:31.186226 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:31 crc kubenswrapper[4956]: I1211 21:48:31.187372 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:31 crc kubenswrapper[4956]: I1211 21:48:31.187439 4956 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:31 crc kubenswrapper[4956]: I1211 21:48:31.187478 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:31 crc kubenswrapper[4956]: I1211 21:48:31.187518 4956 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 21:48:32 crc kubenswrapper[4956]: I1211 21:48:32.114892 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"592b2c1c228ee9581b0bfa4ac72e618af20756a4e2f595945106d4b37f5df465"} Dec 11 21:48:32 crc kubenswrapper[4956]: I1211 21:48:32.114948 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"20667c6ee635baf8885aff1c3846ee4eac15b8f99216c271a752f914b9234f2f"} Dec 11 21:48:32 crc kubenswrapper[4956]: I1211 21:48:32.114967 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d6cbd49442253079dfaef298a9d6e1130f40f812cbd46e10a60cc62778452b8a"} Dec 11 21:48:32 crc kubenswrapper[4956]: I1211 21:48:32.115032 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:32 crc kubenswrapper[4956]: I1211 21:48:32.115040 4956 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 11 21:48:32 crc kubenswrapper[4956]: I1211 21:48:32.115253 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:32 crc kubenswrapper[4956]: I1211 21:48:32.117014 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:32 crc kubenswrapper[4956]: I1211 21:48:32.117051 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:32 crc kubenswrapper[4956]: I1211 21:48:32.117096 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:32 crc kubenswrapper[4956]: I1211 21:48:32.117120 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:32 crc kubenswrapper[4956]: I1211 21:48:32.117062 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:32 crc kubenswrapper[4956]: I1211 21:48:32.117177 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:32 crc kubenswrapper[4956]: I1211 21:48:32.465534 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 21:48:32 crc kubenswrapper[4956]: I1211 21:48:32.480183 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 21:48:32 crc kubenswrapper[4956]: I1211 21:48:32.769157 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:48:33 crc kubenswrapper[4956]: I1211 21:48:33.124849 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b56f9564355697204a05ef3094696a0402f156b25b2f7b6b6ec2f6acc7d856e6"} Dec 11 21:48:33 crc kubenswrapper[4956]: I1211 21:48:33.124937 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"285fb36693ebdb30f5a611ec43ffe2df9423f9091904874a93f93a83798b70e5"} Dec 11 21:48:33 crc kubenswrapper[4956]: I1211 21:48:33.124959 4956 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 11 21:48:33 crc kubenswrapper[4956]: I1211 21:48:33.125044 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:33 crc kubenswrapper[4956]: I1211 21:48:33.125082 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:33 crc kubenswrapper[4956]: I1211 21:48:33.126124 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:33 crc kubenswrapper[4956]: I1211 21:48:33.126737 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:33 crc kubenswrapper[4956]: I1211 21:48:33.126847 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:33 crc kubenswrapper[4956]: I1211 21:48:33.126876 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:33 crc kubenswrapper[4956]: I1211 21:48:33.126805 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:33 crc kubenswrapper[4956]: I1211 21:48:33.126977 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:33 crc kubenswrapper[4956]: I1211 21:48:33.126997 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:33 crc kubenswrapper[4956]: I1211 21:48:33.127181 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:33 crc kubenswrapper[4956]: I1211 21:48:33.127213 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:33 crc kubenswrapper[4956]: I1211 21:48:33.127230 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:34 crc kubenswrapper[4956]: I1211 21:48:34.128095 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:34 crc kubenswrapper[4956]: I1211 21:48:34.129077 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:34 crc kubenswrapper[4956]: I1211 21:48:34.162058 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:34 crc kubenswrapper[4956]: I1211 21:48:34.162109 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:34 crc kubenswrapper[4956]: I1211 21:48:34.162133 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:34 crc kubenswrapper[4956]: I1211 21:48:34.163750 4956 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:34 crc kubenswrapper[4956]: I1211 21:48:34.163812 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:34 crc kubenswrapper[4956]: I1211 21:48:34.163825 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:34 crc kubenswrapper[4956]: I1211 21:48:34.216173 4956 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 11 21:48:35 crc kubenswrapper[4956]: I1211 21:48:35.014640 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:48:35 crc kubenswrapper[4956]: I1211 21:48:35.015319 4956 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 11 21:48:35 crc kubenswrapper[4956]: I1211 21:48:35.015411 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:35 crc kubenswrapper[4956]: I1211 21:48:35.017346 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:35 crc kubenswrapper[4956]: I1211 21:48:35.017407 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:35 crc kubenswrapper[4956]: I1211 21:48:35.017423 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:35 crc kubenswrapper[4956]: I1211 21:48:35.186240 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 11 21:48:35 crc kubenswrapper[4956]: I1211 21:48:35.187202 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:35 crc kubenswrapper[4956]: I1211 21:48:35.188943 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:35 crc kubenswrapper[4956]: I1211 21:48:35.189042 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:35 crc kubenswrapper[4956]: I1211 21:48:35.189064 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:35 crc kubenswrapper[4956]: I1211 21:48:35.735710 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 11 21:48:36 crc kubenswrapper[4956]: I1211 21:48:36.134152 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:36 crc kubenswrapper[4956]: I1211 21:48:36.139410 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:36 crc kubenswrapper[4956]: I1211 21:48:36.139479 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:36 crc kubenswrapper[4956]: I1211 21:48:36.139514 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:36 crc kubenswrapper[4956]: I1211 21:48:36.200166 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:48:36 crc kubenswrapper[4956]: I1211 
21:48:36.200396 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:36 crc kubenswrapper[4956]: I1211 21:48:36.201938 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:36 crc kubenswrapper[4956]: I1211 21:48:36.201972 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:36 crc kubenswrapper[4956]: I1211 21:48:36.201983 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:37 crc kubenswrapper[4956]: I1211 21:48:37.251903 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 21:48:37 crc kubenswrapper[4956]: I1211 21:48:37.252143 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:37 crc kubenswrapper[4956]: I1211 21:48:37.254005 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:37 crc kubenswrapper[4956]: I1211 21:48:37.254057 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:37 crc kubenswrapper[4956]: I1211 21:48:37.254076 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:38 crc kubenswrapper[4956]: E1211 21:48:38.083301 4956 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 11 21:48:38 crc kubenswrapper[4956]: I1211 21:48:38.611135 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 21:48:38 crc kubenswrapper[4956]: I1211 21:48:38.611391 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:38 crc kubenswrapper[4956]: I1211 21:48:38.613196 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:38 crc kubenswrapper[4956]: I1211 21:48:38.613284 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:38 crc kubenswrapper[4956]: I1211 21:48:38.613303 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:38 crc kubenswrapper[4956]: I1211 21:48:38.619008 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 21:48:39 crc kubenswrapper[4956]: I1211 21:48:39.150089 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:39 crc kubenswrapper[4956]: I1211 21:48:39.151428 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:39 crc kubenswrapper[4956]: I1211 21:48:39.151478 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:39 crc kubenswrapper[4956]: I1211 21:48:39.151496 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:39 crc kubenswrapper[4956]: I1211 21:48:39.569638 4956 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 21:48:40 crc kubenswrapper[4956]: I1211 21:48:40.152701 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:40 crc kubenswrapper[4956]: I1211 21:48:40.154324 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:40 crc kubenswrapper[4956]: I1211 21:48:40.154366 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:40 crc kubenswrapper[4956]: I1211 21:48:40.154387 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:41 crc kubenswrapper[4956]: W1211 21:48:41.084589 4956 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 11 21:48:41 crc kubenswrapper[4956]: I1211 21:48:41.084750 4956 trace.go:236] Trace[269089792]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Dec-2025 21:48:31.082) (total time: 10002ms): Dec 11 21:48:41 crc kubenswrapper[4956]: Trace[269089792]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (21:48:41.084) Dec 11 21:48:41 crc kubenswrapper[4956]: Trace[269089792]: [10.002021136s] [10.002021136s] END Dec 11 21:48:41 crc kubenswrapper[4956]: E1211 21:48:41.084814 4956 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 11 21:48:41 crc kubenswrapper[4956]: E1211 21:48:41.188482 4956 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Dec 11 21:48:41 crc kubenswrapper[4956]: I1211 21:48:41.486165 4956 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 11 21:48:41 crc kubenswrapper[4956]: I1211 21:48:41.486317 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 11 21:48:41 crc kubenswrapper[4956]: I1211 21:48:41.491061 4956 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 11 21:48:41 crc kubenswrapper[4956]: 
I1211 21:48:41.491135 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 11 21:48:42 crc kubenswrapper[4956]: I1211 21:48:42.570211 4956 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 11 21:48:42 crc kubenswrapper[4956]: I1211 21:48:42.570276 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 11 21:48:44 crc kubenswrapper[4956]: I1211 21:48:44.389459 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:44 crc kubenswrapper[4956]: I1211 21:48:44.391052 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:44 crc kubenswrapper[4956]: I1211 21:48:44.391128 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:44 crc kubenswrapper[4956]: I1211 21:48:44.391152 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:44 crc kubenswrapper[4956]: I1211 21:48:44.391190 4956 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 21:48:44 crc kubenswrapper[4956]: E1211 21:48:44.395331 4956 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 11 21:48:45 crc kubenswrapper[4956]: I1211 21:48:45.019802 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:48:45 crc kubenswrapper[4956]: I1211 21:48:45.019965 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:45 crc kubenswrapper[4956]: I1211 21:48:45.021200 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:45 crc kubenswrapper[4956]: I1211 21:48:45.021233 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:45 crc kubenswrapper[4956]: I1211 21:48:45.021245 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:45 crc kubenswrapper[4956]: I1211 21:48:45.024753 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:48:45 crc kubenswrapper[4956]: I1211 21:48:45.165660 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:45 crc kubenswrapper[4956]: I1211 21:48:45.166623 4956 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:45 crc kubenswrapper[4956]: I1211 21:48:45.166654 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:45 crc kubenswrapper[4956]: I1211 21:48:45.166663 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:45 crc kubenswrapper[4956]: I1211 21:48:45.759988 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 11 21:48:45 crc kubenswrapper[4956]: I1211 21:48:45.760142 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:45 crc kubenswrapper[4956]: I1211 21:48:45.761174 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:45 crc kubenswrapper[4956]: I1211 21:48:45.761220 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:45 crc kubenswrapper[4956]: I1211 21:48:45.761233 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:45 crc kubenswrapper[4956]: I1211 21:48:45.773610 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.168348 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.169310 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.169363 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.169380 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:46 crc kubenswrapper[4956]: E1211 21:48:46.476868 4956 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.478220 4956 trace.go:236] Trace[1153011977]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Dec-2025 21:48:31.698) (total time: 14779ms): Dec 11 21:48:46 crc kubenswrapper[4956]: Trace[1153011977]: ---"Objects listed" error: 14779ms (21:48:46.478) Dec 11 21:48:46 crc kubenswrapper[4956]: Trace[1153011977]: [14.779955534s] [14.779955534s] END Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.478259 4956 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.494232 4956 trace.go:236] Trace[864493367]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Dec-2025 21:48:31.687) (total time: 14806ms): Dec 11 21:48:46 crc kubenswrapper[4956]: Trace[864493367]: ---"Objects listed" error: 14806ms (21:48:46.494) Dec 11 21:48:46 crc kubenswrapper[4956]: Trace[864493367]: [14.806816847s] [14.806816847s] END Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.494284 4956 reflector.go:368] Caches populated for *v1.Node from 
k8s.io/client-go/informers/factory.go:160 Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.506759 4956 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.507272 4956 trace.go:236] Trace[140248036]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Dec-2025 21:48:31.831) (total time: 14675ms): Dec 11 21:48:46 crc kubenswrapper[4956]: Trace[140248036]: ---"Objects listed" error: 14675ms (21:48:46.507) Dec 11 21:48:46 crc kubenswrapper[4956]: Trace[140248036]: [14.675616615s] [14.675616615s] END Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.507297 4956 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.507444 4956 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.521133 4956 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:36974->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.521202 4956 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:36974->192.168.126.11:17697: read: connection reset by peer" Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.521550 4956 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.521582 4956 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.522044 4956 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.522112 4956 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.533125 4956 csr.go:261] certificate signing request csr-jn5zz is approved, waiting to be issued Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.553501 4956 csr.go:257] certificate signing request csr-jn5zz is 
issued Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.768363 4956 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.964143 4956 apiserver.go:52] "Watching apiserver" Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.966918 4956 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.967211 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"] Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.967842 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.968037 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.968264 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.968688 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.968866 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:48:46 crc kubenswrapper[4956]: E1211 21:48:46.968870 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.969153 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 21:48:46 crc kubenswrapper[4956]: E1211 21:48:46.969533 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:48:46 crc kubenswrapper[4956]: E1211 21:48:46.969759 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.970173 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.970173 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.970578 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.970676 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.970912 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.971319 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.975418 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.975714 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 11 21:48:46 crc kubenswrapper[4956]: I1211 21:48:46.975900 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.005901 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.026941 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.038347 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.050739 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.064826 4956 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.073507 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.098829 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.134861 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.134904 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.134920 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.134937 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.134953 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.134971 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.134989 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135004 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135019 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135033 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135050 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") "
Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135065 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135081 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135094 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135110 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135127 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135141 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
\"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135158 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135213 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135232 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135247 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135261 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135280 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135295 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135310 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135299 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135325 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135390 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135413 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135443 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135463 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135478 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135493 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135507 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135524 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135558 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod 
\"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135630 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135664 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135681 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135695 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135726 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135740 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135754 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135790 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135813 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135832 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135853 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135869 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135893 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135908 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135923 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135938 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135953 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135967 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135982 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135997 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136011 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136031 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136045 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136060 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136075 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136092 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136107 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136122 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136137 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136151 4956 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136166 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136181 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136196 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136211 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136410 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136434 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136468 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136492 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136514 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod 
\"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136536 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136559 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136581 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136603 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136625 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136650 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135436 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135569 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.135930 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136013 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136041 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136201 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136475 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136562 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136649 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136727 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.137048 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.137166 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.137247 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.137254 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:48:47.637232763 +0000 UTC m=+20.081610983 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.137322 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.137458 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.137474 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.137495 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.137727 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.137794 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.137813 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.138018 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.138076 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.138121 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.138240 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.138250 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.138454 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.138544 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.138681 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.138689 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.138802 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.139090 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.143560 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.145226 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.145268 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.145444 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.145500 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.145642 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.145873 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.146141 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.146333 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.146527 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.146729 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.136674 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.146826 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.146860 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.146886 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.146907 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.146928 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: 
\"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.146952 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.146973 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.146992 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147015 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147035 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147054 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147075 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147097 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147118 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147140 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147165 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147187 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147210 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147234 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147255 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147277 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147299 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147320 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147341 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147388 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147405 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147422 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147438 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147454 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147469 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147485 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147499 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147517 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147535 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147550 4956 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147564 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147579 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147594 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147612 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147627 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147641 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147656 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147671 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147686 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 
21:48:47.147701 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147720 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147742 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147779 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147800 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147817 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147840 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.147861 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148043 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148199 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.137265 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.146265 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148264 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148303 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.146442 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148451 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148470 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148496 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148514 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148524 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148532 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148585 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148608 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148629 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148645 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148671 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: 
\"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148818 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148843 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148860 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148880 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148897 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148914 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148930 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148946 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148962 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148979 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.148996 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149011 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149028 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149044 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149060 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149076 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149092 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149109 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149123 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149145 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149169 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149192 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149216 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149238 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149262 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149285 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149310 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149334 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149357 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149398 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" 
(UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149421 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149443 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149468 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149491 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149515 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149542 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149565 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149588 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149611 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149636 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149659 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149682 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149706 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149730 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.149753 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.150803 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.150845 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.150871 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.150896 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.150925 4956 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.150954 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.150979 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.151004 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.151054 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.151086 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.151116 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.152143 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.152170 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.152189 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.155320 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.156838 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.156919 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.156960 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.156995 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157035 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157075 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157114 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod 
\"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157242 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157266 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157282 4956 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157306 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157321 4956 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157337 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157352 4956 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157373 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157388 4956 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157402 4956 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157423 4956 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157438 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157452 4956 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157467 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157489 4956 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157503 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157517 4956 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157531 4956 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157550 4956 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157565 4956 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157580 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157600 4956 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157616 4956 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157631 4956 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157645 4956 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157663 4956 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157677 4956 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157690 4956 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157703 4956 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157722 4956 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157737 4956 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157752 4956 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157786 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157808 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157823 4956 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157838 4956 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157856 4956 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157871 4956 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157886 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: 
\"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157901 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157929 4956 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157944 4956 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157958 4956 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157972 4956 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157989 4956 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.158005 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.158021 4956 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.158040 4956 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.158054 4956 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.158069 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.158082 4956 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.158101 4956 reconciler_common.go:293] 
"Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.159078 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.150595 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.150928 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.150978 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.151077 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.151216 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.151408 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.151562 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.151964 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.151975 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.155899 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.156528 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.156561 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.156636 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.156654 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). 
InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.156791 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.156802 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.156825 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157477 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157661 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.157846 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.158359 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.159279 4956 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.161094 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.165610 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.162032 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.162092 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.162311 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.162521 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.162543 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.162828 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.163080 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.163133 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.163438 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.163604 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.164975 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.165223 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.165257 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.165824 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.164528 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.168056 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.168322 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.168332 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.168447 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.165897 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.171424 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.171649 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.171941 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.172073 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.172206 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.172215 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.172372 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.172561 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.172575 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.172705 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.172778 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.172893 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.172995 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.173032 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.173141 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.173154 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.173229 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.173271 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.173405 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.173455 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.173624 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.173679 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.173762 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). 
InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.173976 4956 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.174131 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.174228 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.174563 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.174801 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.174855 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.174973 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.175016 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.175337 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.175592 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.176167 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.177014 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.177495 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.167427 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.177948 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.178136 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.178387 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.178634 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 21:48:47.678614113 +0000 UTC m=+20.122992263 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.178964 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.179590 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.179624 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.179889 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.179895 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.180068 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.180121 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.180202 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.180426 4956 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.180583 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 21:48:47.680553783 +0000 UTC m=+20.124931933 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.180752 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.181318 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.181631 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.181849 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.182103 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.182105 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.182434 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.182576 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.183247 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.184964 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). 
InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.185257 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.185440 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.185469 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.185762 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.186299 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.186452 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.186863 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.187930 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.188239 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.192050 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.192357 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.192722 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.193043 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.193141 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.193187 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.193503 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.193554 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.193590 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.193880 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.194087 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.194306 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.194559 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.194679 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.194933 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.194986 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.195263 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.195364 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.195904 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.195917 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.196567 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.196683 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.196781 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.196945 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.197028 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.199071 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.199106 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.199118 4956 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.199167 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 21:48:47.699150568 +0000 UTC m=+20.143528718 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.203597 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.203876 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.208064 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.208598 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.209721 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.209997 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.210340 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.210385 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.211096 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.211598 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.211715 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.215993 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.216185 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.218497 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.218534 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.218549 4956 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.218610 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 21:48:47.718589045 +0000 UTC m=+20.162967195 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.226532 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.227120 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.227952 4956 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41" exitCode=255 Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.228024 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41"} Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.232049 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.233881 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.241725 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262080 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262126 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262168 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262178 4956 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262186 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262195 4956 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262204 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262212 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262221 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262229 4956 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 
21:48:47.262237 4956 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262245 4956 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262253 4956 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262262 4956 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262270 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262278 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262285 4956 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262293 4956 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262302 4956 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262310 4956 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262317 4956 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262325 4956 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262333 4956 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262343 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" 
(UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262350 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262359 4956 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262366 4956 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262374 4956 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262381 4956 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262389 4956 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262378 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262431 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262396 4956 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262497 4956 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262513 4956 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262524 4956 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262536 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262549 4956 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262577 4956 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262588 4956 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262599 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262610 4956 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262620 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262631 4956 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262660 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262671 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262682 4956 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262695 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262705 4956 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262715 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262742 4956 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262752 4956 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262793 4956 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262814 4956 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262826 4956 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262838 4956 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262849 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262878 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262892 4956 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262903 4956 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262916 4956 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262927 4956 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: 
\"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262971 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262983 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.262994 4956 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263005 4956 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263023 4956 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263034 4956 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263062 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263074 4956 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263085 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263096 4956 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263108 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263119 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263130 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: 
\"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263140 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263150 4956 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263161 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263171 4956 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263182 4956 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263210 4956 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263223 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263235 4956 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263254 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263296 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263307 4956 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263318 4956 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263328 4956 reconciler_common.go:293] 
"Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263359 4956 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263372 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263383 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263393 4956 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263423 4956 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263452 4956 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263478 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263488 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263505 4956 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263515 4956 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263535 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263548 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 
21:48:47.263562 4956 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263573 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263583 4956 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263594 4956 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263605 4956 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263614 4956 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263625 4956 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263644 4956 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263658 4956 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263668 4956 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263678 4956 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263689 4956 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263699 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263711 4956 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263722 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263734 4956 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263745 4956 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263756 4956 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263819 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263832 4956 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263842 4956 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263854 4956 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263866 4956 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263877 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263889 4956 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263901 4956 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 
21:48:47.263913 4956 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263923 4956 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263934 4956 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263945 4956 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263956 4956 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263966 4956 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263978 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.263989 4956 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.264001 4956 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.264012 4956 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.264025 4956 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.264036 4956 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.264046 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 
21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.264056 4956 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.264066 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.264076 4956 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.264086 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.264096 4956 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.264105 4956 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.264117 4956 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.270090 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.273118 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.277380 4956 scope.go:117] "RemoveContainer" containerID="2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.289475 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.291279 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.301896 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.319286 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: W1211 21:48:47.343291 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-7f1273ae8597e8006c259183bf207d56e14b5f856ad438abbd547d73bceb06e3 WatchSource:0}: Error finding container 7f1273ae8597e8006c259183bf207d56e14b5f856ad438abbd547d73bceb06e3: Status 404 returned error can't find the container with id 7f1273ae8597e8006c259183bf207d56e14b5f856ad438abbd547d73bceb06e3 Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.372924 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.393798 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.411746 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.431697 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.555127 4956 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-12-11 21:43:46 +0000 UTC, rotation deadline is 2026-10-21 12:57:42.811082248 +0000 UTC Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.555496 4956 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7527h8m55.255590639s for next certificate rotation Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.586881 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-h6mx2"] Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.587260 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.587415 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-cz2dx"] Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.588049 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-cz2dx" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.589317 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.589624 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.590104 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.590211 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.590236 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.590623 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.591852 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.597102 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.603419 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.615400 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.628613 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11
T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.645816 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.657196 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.666662 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.670567 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.670735 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:48:48.670716653 +0000 UTC m=+21.115094803 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.675926 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.682706 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.692757 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.704547 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.727553 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.755995 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.771111 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.771155 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.771181 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.771215 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cf61c63b-b06c-4f51-add2-aefe57de751a-mcd-auth-proxy-config\") pod \"machine-config-daemon-h6mx2\" (UID: \"cf61c63b-b06c-4f51-add2-aefe57de751a\") " pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.771243 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.771249 4956 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.771278 4956 projected.go:288] 
Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.771297 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.771308 4956 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.771336 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 21:48:48.771313926 +0000 UTC m=+21.215692076 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.771355 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 21:48:48.771347177 +0000 UTC m=+21.215725327 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.771386 4956 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.771417 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 21:48:48.771402879 +0000 UTC m=+21.215781029 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.771265 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwnrx\" (UniqueName: \"kubernetes.io/projected/cf61c63b-b06c-4f51-add2-aefe57de751a-kube-api-access-bwnrx\") pod \"machine-config-daemon-h6mx2\" (UID: \"cf61c63b-b06c-4f51-add2-aefe57de751a\") " pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.771456 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/afc0240e-109a-48e1-b0fe-4ca9386fce91-hosts-file\") pod \"node-resolver-cz2dx\" (UID: \"afc0240e-109a-48e1-b0fe-4ca9386fce91\") " pod="openshift-dns/node-resolver-cz2dx" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.771476 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bn6dr\" (UniqueName: \"kubernetes.io/projected/afc0240e-109a-48e1-b0fe-4ca9386fce91-kube-api-access-bn6dr\") pod \"node-resolver-cz2dx\" (UID: \"afc0240e-109a-48e1-b0fe-4ca9386fce91\") " pod="openshift-dns/node-resolver-cz2dx" Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.771460 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.771499 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.771513 4956 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.771553 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 21:48:48.771544552 +0000 UTC m=+21.215922792 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.771501 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/cf61c63b-b06c-4f51-add2-aefe57de751a-rootfs\") pod \"machine-config-daemon-h6mx2\" (UID: \"cf61c63b-b06c-4f51-add2-aefe57de751a\") " pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.771591 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cf61c63b-b06c-4f51-add2-aefe57de751a-proxy-tls\") pod \"machine-config-daemon-h6mx2\" (UID: \"cf61c63b-b06c-4f51-add2-aefe57de751a\") " pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.774099 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.796930 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.809540 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.832267 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11
T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.842067 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.872628 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cf61c63b-b06c-4f51-add2-aefe57de751a-mcd-auth-proxy-config\") pod \"machine-config-daemon-h6mx2\" (UID: \"cf61c63b-b06c-4f51-add2-aefe57de751a\") " pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.872679 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwnrx\" (UniqueName: \"kubernetes.io/projected/cf61c63b-b06c-4f51-add2-aefe57de751a-kube-api-access-bwnrx\") pod \"machine-config-daemon-h6mx2\" (UID: \"cf61c63b-b06c-4f51-add2-aefe57de751a\") " pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.872703 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/afc0240e-109a-48e1-b0fe-4ca9386fce91-hosts-file\") pod \"node-resolver-cz2dx\" (UID: \"afc0240e-109a-48e1-b0fe-4ca9386fce91\") " pod="openshift-dns/node-resolver-cz2dx" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.872727 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bn6dr\" (UniqueName: \"kubernetes.io/projected/afc0240e-109a-48e1-b0fe-4ca9386fce91-kube-api-access-bn6dr\") pod \"node-resolver-cz2dx\" (UID: \"afc0240e-109a-48e1-b0fe-4ca9386fce91\") " pod="openshift-dns/node-resolver-cz2dx" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.872755 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/cf61c63b-b06c-4f51-add2-aefe57de751a-rootfs\") pod \"machine-config-daemon-h6mx2\" (UID: \"cf61c63b-b06c-4f51-add2-aefe57de751a\") " 
pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.872798 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cf61c63b-b06c-4f51-add2-aefe57de751a-proxy-tls\") pod \"machine-config-daemon-h6mx2\" (UID: \"cf61c63b-b06c-4f51-add2-aefe57de751a\") " pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.872839 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/afc0240e-109a-48e1-b0fe-4ca9386fce91-hosts-file\") pod \"node-resolver-cz2dx\" (UID: \"afc0240e-109a-48e1-b0fe-4ca9386fce91\") " pod="openshift-dns/node-resolver-cz2dx" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.873031 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/cf61c63b-b06c-4f51-add2-aefe57de751a-rootfs\") pod \"machine-config-daemon-h6mx2\" (UID: \"cf61c63b-b06c-4f51-add2-aefe57de751a\") " pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.873521 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cf61c63b-b06c-4f51-add2-aefe57de751a-mcd-auth-proxy-config\") pod \"machine-config-daemon-h6mx2\" (UID: \"cf61c63b-b06c-4f51-add2-aefe57de751a\") " pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.874446 4956 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Dec 11 21:48:47 crc kubenswrapper[4956]: W1211 21:48:47.874617 4956 reflector.go:484] object-"openshift-machine-config-operator"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-machine-config-operator"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 11 21:48:47 crc kubenswrapper[4956]: W1211 21:48:47.874657 4956 reflector.go:484] object-"openshift-dns"/"node-resolver-dockercfg-kz9s7": watch of *v1.Secret ended with: very short watch: object-"openshift-dns"/"node-resolver-dockercfg-kz9s7": Unexpected watch close - watch lasted less than a second and no items received Dec 11 21:48:47 crc kubenswrapper[4956]: W1211 21:48:47.874684 4956 reflector.go:484] object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq": watch of *v1.Secret ended with: very short watch: object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq": Unexpected watch close - watch lasted less than a second and no items received Dec 11 21:48:47 crc kubenswrapper[4956]: W1211 21:48:47.874707 4956 reflector.go:484] object-"openshift-dns"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-dns"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 11 21:48:47 crc kubenswrapper[4956]: W1211 21:48:47.874729 4956 reflector.go:484] object-"openshift-network-node-identity"/"env-overrides": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"env-overrides": Unexpected watch close - watch lasted less than a second and no items received Dec 11 21:48:47 crc kubenswrapper[4956]: W1211 
21:48:47.874752 4956 reflector.go:484] object-"openshift-network-node-identity"/"network-node-identity-cert": watch of *v1.Secret ended with: very short watch: object-"openshift-network-node-identity"/"network-node-identity-cert": Unexpected watch close - watch lasted less than a second and no items received Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.874901 4956 projected.go:194] Error preparing data for projected volume kube-api-access-bwnrx for pod openshift-machine-config-operator/machine-config-daemon-h6mx2: failed to fetch token: Post "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/serviceaccounts/machine-config-daemon/token": read tcp 38.102.83.83:51726->38.102.83.83:6443: use of closed network connection Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.874955 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/cf61c63b-b06c-4f51-add2-aefe57de751a-kube-api-access-bwnrx podName:cf61c63b-b06c-4f51-add2-aefe57de751a nodeName:}" failed. No retries permitted until 2025-12-11 21:48:48.374934958 +0000 UTC m=+20.819313178 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-bwnrx" (UniqueName: "kubernetes.io/projected/cf61c63b-b06c-4f51-add2-aefe57de751a-kube-api-access-bwnrx") pod "machine-config-daemon-h6mx2" (UID: "cf61c63b-b06c-4f51-add2-aefe57de751a") : failed to fetch token: Post "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/serviceaccounts/machine-config-daemon/token": read tcp 38.102.83.83:51726->38.102.83.83:6443: use of closed network connection Dec 11 21:48:47 crc kubenswrapper[4956]: W1211 21:48:47.875109 4956 reflector.go:484] object-"openshift-machine-config-operator"/"proxy-tls": watch of *v1.Secret ended with: very short watch: object-"openshift-machine-config-operator"/"proxy-tls": Unexpected watch close - watch lasted less than a second and no items received Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.875244 4956 projected.go:194] Error preparing data for projected volume kube-api-access-bn6dr for pod openshift-dns/node-resolver-cz2dx: failed to fetch token: Post "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-dns/serviceaccounts/node-resolver/token": read tcp 38.102.83.83:51726->38.102.83.83:6443: use of closed network connection Dec 11 21:48:47 crc kubenswrapper[4956]: W1211 21:48:47.875290 4956 reflector.go:484] object-"openshift-network-operator"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-operator"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 11 21:48:47 crc kubenswrapper[4956]: E1211 21:48:47.875300 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/afc0240e-109a-48e1-b0fe-4ca9386fce91-kube-api-access-bn6dr podName:afc0240e-109a-48e1-b0fe-4ca9386fce91 nodeName:}" failed. No retries permitted until 2025-12-11 21:48:48.375281327 +0000 UTC m=+20.819659477 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-bn6dr" (UniqueName: "kubernetes.io/projected/afc0240e-109a-48e1-b0fe-4ca9386fce91-kube-api-access-bn6dr") pod "node-resolver-cz2dx" (UID: "afc0240e-109a-48e1-b0fe-4ca9386fce91") : failed to fetch token: Post "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-dns/serviceaccounts/node-resolver/token": read tcp 38.102.83.83:51726->38.102.83.83:6443: use of closed network connection Dec 11 21:48:47 crc kubenswrapper[4956]: W1211 21:48:47.875327 4956 reflector.go:484] object-"openshift-machine-config-operator"/"kube-rbac-proxy": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-machine-config-operator"/"kube-rbac-proxy": Unexpected watch close - watch lasted less than a second and no items received Dec 11 21:48:47 crc kubenswrapper[4956]: W1211 21:48:47.875492 4956 reflector.go:484] object-"openshift-dns"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-dns"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 11 21:48:47 crc kubenswrapper[4956]: W1211 21:48:47.875526 4956 reflector.go:484] object-"openshift-network-operator"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-operator"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 11 21:48:47 crc kubenswrapper[4956]: W1211 21:48:47.875548 4956 reflector.go:484] object-"openshift-network-node-identity"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 11 21:48:47 crc kubenswrapper[4956]: W1211 21:48:47.875572 4956 reflector.go:484] object-"openshift-network-node-identity"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 11 21:48:47 crc kubenswrapper[4956]: W1211 21:48:47.875593 4956 reflector.go:484] object-"openshift-machine-config-operator"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-machine-config-operator"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 11 21:48:47 crc kubenswrapper[4956]: W1211 21:48:47.875615 4956 reflector.go:484] object-"openshift-network-node-identity"/"ovnkube-identity-cm": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"ovnkube-identity-cm": Unexpected watch close - watch lasted less than a second and no items received Dec 11 21:48:47 crc kubenswrapper[4956]: W1211 21:48:47.875625 4956 reflector.go:484] object-"openshift-network-operator"/"iptables-alerter-script": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-operator"/"iptables-alerter-script": Unexpected watch close - watch lasted less than a second and no items received Dec 11 21:48:47 crc kubenswrapper[4956]: W1211 21:48:47.875903 4956 reflector.go:484] object-"openshift-network-operator"/"metrics-tls": watch of *v1.Secret ended with: very short watch: object-"openshift-network-operator"/"metrics-tls": Unexpected watch close - watch lasted less than a second and no items received Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 
21:48:47.882140 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cf61c63b-b06c-4f51-add2-aefe57de751a-proxy-tls\") pod \"machine-config-daemon-h6mx2\" (UID: \"cf61c63b-b06c-4f51-add2-aefe57de751a\") " pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.900276 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-tmhkw"] Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.900951 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-v52ql"] Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.901081 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.901759 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.903978 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.904300 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.904469 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.911002 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.911144 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.911345 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.911373 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.911513 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.911837 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.912189 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.912322 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.912327 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.922149 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.922372 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-p8slf"] Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.922739 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-p8slf" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.924034 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.924486 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.940527 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.960075 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.970011 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:47 crc kubenswrapper[4956]: I1211 21:48:47.989598 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11
T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.001483 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.014786 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.028142 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.028897 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.029580 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.030802 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.031465 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.032602 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.033132 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.033711 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.034624 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.035369 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.036537 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.037125 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" 
path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.038289 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.038954 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.039436 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.039557 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.040468 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.041039 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.041999 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.042455 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.043173 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.044203 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.044758 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.046064 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.046584 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 
21:48:48.047660 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.048263 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.049053 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.050370 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.050888 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.051835 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.052313 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.053166 4956 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.053267 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.054835 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.055727 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.056171 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.056579 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.057936 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.058680 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.059697 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.060456 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.061526 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.062020 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.062981 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.063642 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.064629 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.065084 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 11 21:48:48 crc 
kubenswrapper[4956]: I1211 21:48:48.065924 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.066434 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.067527 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.068057 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.068838 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.069277 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.070136 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.070675 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.071479 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.076721 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-cnibin\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.076795 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-env-overrides\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.076826 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-ovnkube-script-lib\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.076847 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-os-release\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.076887 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpf2b\" (UniqueName: \"kubernetes.io/projected/3f5c3105-d748-4563-b3f7-a566d31a3031-kube-api-access-bpf2b\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.076918 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/59601647-5a77-4d78-9821-73873f2cec46-cni-binary-copy\") pod \"multus-additional-cni-plugins-tmhkw\" (UID: \"59601647-5a77-4d78-9821-73873f2cec46\") " pod="openshift-multus/multus-additional-cni-plugins-tmhkw" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.076941 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-systemd-units\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.076964 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-var-lib-openvswitch\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.076978 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-log-socket\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.076995 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-cni-netd\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077057 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-multus-socket-dir-parent\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077093 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/59601647-5a77-4d78-9821-73873f2cec46-os-release\") pod \"multus-additional-cni-plugins-tmhkw\" (UID: \"59601647-5a77-4d78-9821-73873f2cec46\") " pod="openshift-multus/multus-additional-cni-plugins-tmhkw" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 
21:48:48.077107 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/59601647-5a77-4d78-9821-73873f2cec46-system-cni-dir\") pod \"multus-additional-cni-plugins-tmhkw\" (UID: \"59601647-5a77-4d78-9821-73873f2cec46\") " pod="openshift-multus/multus-additional-cni-plugins-tmhkw" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077120 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-run-netns\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077141 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-system-cni-dir\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077175 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-host-var-lib-cni-bin\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077218 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-hostroot\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077237 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-host-run-multus-certs\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077263 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-host-var-lib-cni-multus\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077286 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-kubelet\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077303 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-ovn-node-metrics-cert\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 
21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077319 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9n6k6\" (UniqueName: \"kubernetes.io/projected/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-kube-api-access-9n6k6\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077336 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-run-ovn-kubernetes\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077351 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-host-var-lib-kubelet\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077368 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-multus-conf-dir\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077383 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/59601647-5a77-4d78-9821-73873f2cec46-tuning-conf-dir\") pod \"multus-additional-cni-plugins-tmhkw\" (UID: \"59601647-5a77-4d78-9821-73873f2cec46\") " pod="openshift-multus/multus-additional-cni-plugins-tmhkw" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077416 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3f5c3105-d748-4563-b3f7-a566d31a3031-cni-binary-copy\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077449 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-run-systemd\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077472 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-host-run-netns\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077492 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-slash\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077517 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-node-log\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077538 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-multus-cni-dir\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077666 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-host-run-k8s-cni-cncf-io\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077744 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-etc-openvswitch\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077816 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-etc-kubernetes\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077865 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-ovnkube-config\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077916 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mktk\" (UniqueName: \"kubernetes.io/projected/59601647-5a77-4d78-9821-73873f2cec46-kube-api-access-5mktk\") pod \"multus-additional-cni-plugins-tmhkw\" (UID: \"59601647-5a77-4d78-9821-73873f2cec46\") " pod="openshift-multus/multus-additional-cni-plugins-tmhkw" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077941 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-run-ovn\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.077991 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3f5c3105-d748-4563-b3f7-a566d31a3031-multus-daemon-config\") pod 
\"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.078025 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-cni-bin\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.078047 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.078074 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/59601647-5a77-4d78-9821-73873f2cec46-cnibin\") pod \"multus-additional-cni-plugins-tmhkw\" (UID: \"59601647-5a77-4d78-9821-73873f2cec46\") " pod="openshift-multus/multus-additional-cni-plugins-tmhkw" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.078111 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/59601647-5a77-4d78-9821-73873f2cec46-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-tmhkw\" (UID: \"59601647-5a77-4d78-9821-73873f2cec46\") " pod="openshift-multus/multus-additional-cni-plugins-tmhkw" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.078132 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-run-openvswitch\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.093516 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.108196 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 
11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.153960 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\
"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubern
etes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.173809 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.179119 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-env-overrides\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.179369 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-ovnkube-script-lib\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.179473 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-os-release\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.179579 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpf2b\" (UniqueName: \"kubernetes.io/projected/3f5c3105-d748-4563-b3f7-a566d31a3031-kube-api-access-bpf2b\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.179662 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/59601647-5a77-4d78-9821-73873f2cec46-cni-binary-copy\") pod \"multus-additional-cni-plugins-tmhkw\" (UID: \"59601647-5a77-4d78-9821-73873f2cec46\") " pod="openshift-multus/multus-additional-cni-plugins-tmhkw" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.179786 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-systemd-units\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.179878 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-systemd-units\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.179829 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-os-release\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.180126 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-multus-socket-dir-parent\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.180295 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-var-lib-openvswitch\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.180423 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-log-socket\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.180557 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-cni-netd\") pod \"ovnkube-node-v52ql\" (UID: 
\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.180684 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/59601647-5a77-4d78-9821-73873f2cec46-os-release\") pod \"multus-additional-cni-plugins-tmhkw\" (UID: \"59601647-5a77-4d78-9821-73873f2cec46\") " pod="openshift-multus/multus-additional-cni-plugins-tmhkw" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.180785 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-system-cni-dir\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.180833 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-env-overrides\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.180627 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/59601647-5a77-4d78-9821-73873f2cec46-cni-binary-copy\") pod \"multus-additional-cni-plugins-tmhkw\" (UID: \"59601647-5a77-4d78-9821-73873f2cec46\") " pod="openshift-multus/multus-additional-cni-plugins-tmhkw" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.180652 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-cni-netd\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.180674 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-ovnkube-script-lib\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.180257 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-multus-socket-dir-parent\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.180750 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/59601647-5a77-4d78-9821-73873f2cec46-os-release\") pod \"multus-additional-cni-plugins-tmhkw\" (UID: \"59601647-5a77-4d78-9821-73873f2cec46\") " pod="openshift-multus/multus-additional-cni-plugins-tmhkw" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.180523 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-log-socket\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc 
kubenswrapper[4956]: I1211 21:48:48.180838 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-system-cni-dir\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.180392 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-var-lib-openvswitch\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.181185 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-host-var-lib-cni-bin\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.181265 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/59601647-5a77-4d78-9821-73873f2cec46-system-cni-dir\") pod \"multus-additional-cni-plugins-tmhkw\" (UID: \"59601647-5a77-4d78-9821-73873f2cec46\") " pod="openshift-multus/multus-additional-cni-plugins-tmhkw" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.181338 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/59601647-5a77-4d78-9821-73873f2cec46-system-cni-dir\") pod \"multus-additional-cni-plugins-tmhkw\" (UID: \"59601647-5a77-4d78-9821-73873f2cec46\") " pod="openshift-multus/multus-additional-cni-plugins-tmhkw" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.181204 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-host-var-lib-cni-bin\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.181547 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-run-netns\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.181693 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-host-var-lib-cni-multus\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.181865 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-hostroot\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.181985 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-host-run-multus-certs\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.182092 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-kubelet\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.182215 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-ovn-node-metrics-cert\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.182308 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9n6k6\" (UniqueName: \"kubernetes.io/projected/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-kube-api-access-9n6k6\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.182666 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-multus-conf-dir\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.182825 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/59601647-5a77-4d78-9821-73873f2cec46-tuning-conf-dir\") pod \"multus-additional-cni-plugins-tmhkw\" (UID: \"59601647-5a77-4d78-9821-73873f2cec46\") " pod="openshift-multus/multus-additional-cni-plugins-tmhkw" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.182192 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-kubelet\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.181993 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-hostroot\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.181798 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-host-var-lib-cni-multus\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.182043 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-host-run-multus-certs\") pod 
\"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.182791 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-multus-conf-dir\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.181658 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-run-netns\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.182989 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-run-ovn-kubernetes\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183162 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-host-var-lib-kubelet\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183194 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3f5c3105-d748-4563-b3f7-a566d31a3031-cni-binary-copy\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183215 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-run-systemd\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183236 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-slash\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183255 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-host-run-netns\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183275 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-host-run-k8s-cni-cncf-io\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183298 4956 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-etc-openvswitch\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183301 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-slash\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183325 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-node-log\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183325 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-run-systemd\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183347 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-multus-cni-dir\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183354 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-host-run-k8s-cni-cncf-io\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183371 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-etc-kubernetes\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183378 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-host-run-netns\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183397 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mktk\" (UniqueName: \"kubernetes.io/projected/59601647-5a77-4d78-9821-73873f2cec46-kube-api-access-5mktk\") pod \"multus-additional-cni-plugins-tmhkw\" (UID: \"59601647-5a77-4d78-9821-73873f2cec46\") " pod="openshift-multus/multus-additional-cni-plugins-tmhkw" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183399 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-etc-openvswitch\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183271 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-host-var-lib-kubelet\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183421 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-run-ovn\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183432 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-multus-cni-dir\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183444 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-etc-kubernetes\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183444 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-ovnkube-config\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183461 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-node-log\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183527 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3f5c3105-d748-4563-b3f7-a566d31a3031-multus-daemon-config\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183544 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/59601647-5a77-4d78-9821-73873f2cec46-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-tmhkw\" (UID: \"59601647-5a77-4d78-9821-73873f2cec46\") " pod="openshift-multus/multus-additional-cni-plugins-tmhkw" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183561 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-run-openvswitch\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183576 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-cni-bin\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183591 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183607 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/59601647-5a77-4d78-9821-73873f2cec46-cnibin\") pod \"multus-additional-cni-plugins-tmhkw\" (UID: \"59601647-5a77-4d78-9821-73873f2cec46\") " pod="openshift-multus/multus-additional-cni-plugins-tmhkw" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183633 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-cnibin\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183693 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-run-openvswitch\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183730 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3f5c3105-d748-4563-b3f7-a566d31a3031-cnibin\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183754 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-cni-bin\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183793 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183813 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/59601647-5a77-4d78-9821-73873f2cec46-cnibin\") pod \"multus-additional-cni-plugins-tmhkw\" (UID: \"59601647-5a77-4d78-9821-73873f2cec46\") " pod="openshift-multus/multus-additional-cni-plugins-tmhkw" Dec 11 21:48:48 crc kubenswrapper[4956]: 
I1211 21:48:48.183833 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-run-ovn\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.183877 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3f5c3105-d748-4563-b3f7-a566d31a3031-cni-binary-copy\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.184025 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-ovnkube-config\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.184197 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3f5c3105-d748-4563-b3f7-a566d31a3031-multus-daemon-config\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.184425 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/59601647-5a77-4d78-9821-73873f2cec46-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-tmhkw\" (UID: \"59601647-5a77-4d78-9821-73873f2cec46\") " pod="openshift-multus/multus-additional-cni-plugins-tmhkw" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.184437 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/59601647-5a77-4d78-9821-73873f2cec46-tuning-conf-dir\") pod \"multus-additional-cni-plugins-tmhkw\" (UID: \"59601647-5a77-4d78-9821-73873f2cec46\") " pod="openshift-multus/multus-additional-cni-plugins-tmhkw" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.184575 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-run-ovn-kubernetes\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.187245 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-ovn-node-metrics-cert\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.190973 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.200382 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpf2b\" (UniqueName: \"kubernetes.io/projected/3f5c3105-d748-4563-b3f7-a566d31a3031-kube-api-access-bpf2b\") pod \"multus-p8slf\" (UID: \"3f5c3105-d748-4563-b3f7-a566d31a3031\") " pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.214294 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.214808 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5mktk\" (UniqueName: \"kubernetes.io/projected/59601647-5a77-4d78-9821-73873f2cec46-kube-api-access-5mktk\") pod \"multus-additional-cni-plugins-tmhkw\" (UID: \"59601647-5a77-4d78-9821-73873f2cec46\") " pod="openshift-multus/multus-additional-cni-plugins-tmhkw" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.218454 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9n6k6\" (UniqueName: \"kubernetes.io/projected/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-kube-api-access-9n6k6\") pod \"ovnkube-node-v52ql\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.226303 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.231335 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37"} Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.231448 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"ea478d20dc001a65d8d7eee430ab0cf87e688ab7e8cca73360c50d62545212df"} Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.231674 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.233045 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf"} Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.233230 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273"} Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.233242 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"c0a9db6c0b6d9a9f10a0211362ee747346b45cc893e6c64e1959cb112fe1280a"} Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.233972 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"7f1273ae8597e8006c259183bf207d56e14b5f856ad438abbd547d73bceb06e3"} Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.235364 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.236685 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc"} Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.236988 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.239421 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.240568 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:48 crc kubenswrapper[4956]: W1211 21:48:48.246063 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod59601647_5a77_4d78_9821_73873f2cec46.slice/crio-2f992e75b9fac7c544828d16fe79148c7594db6138e96dcc0bffcc3e7b06c62e WatchSource:0}: Error finding container 2f992e75b9fac7c544828d16fe79148c7594db6138e96dcc0bffcc3e7b06c62e: Status 404 returned error can't find the container with id 2f992e75b9fac7c544828d16fe79148c7594db6138e96dcc0bffcc3e7b06c62e Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.246927 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-p8slf" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.252388 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc 
kubenswrapper[4956]: W1211 21:48:48.254788 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc5a2674b_3cd8_4d21_bdba_2e3d0bfcab30.slice/crio-f9b036d90f9d8482bffd0bf02cb9cd9693e51f4eddce514aba0fbceb67a7dad4 WatchSource:0}: Error finding container f9b036d90f9d8482bffd0bf02cb9cd9693e51f4eddce514aba0fbceb67a7dad4: Status 404 returned error can't find the container with id f9b036d90f9d8482bffd0bf02cb9cd9693e51f4eddce514aba0fbceb67a7dad4 Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.260341 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: W1211 21:48:48.261340 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3f5c3105_d748_4563_b3f7_a566d31a3031.slice/crio-dee3ad607aa478703992e9eb96bca8ab34b58359469568d6c426c812413aabb4 WatchSource:0}: Error finding container 
dee3ad607aa478703992e9eb96bca8ab34b58359469568d6c426c812413aabb4: Status 404 returned error can't find the container with id dee3ad607aa478703992e9eb96bca8ab34b58359469568d6c426c812413aabb4 Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.272641 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753f
c478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.283409 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.306273 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.321163 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.358451 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.385330 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwnrx\" (UniqueName: \"kubernetes.io/projected/cf61c63b-b06c-4f51-add2-aefe57de751a-kube-api-access-bwnrx\") pod \"machine-config-daemon-h6mx2\" (UID: \"cf61c63b-b06c-4f51-add2-aefe57de751a\") " pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.385395 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bn6dr\" (UniqueName: \"kubernetes.io/projected/afc0240e-109a-48e1-b0fe-4ca9386fce91-kube-api-access-bn6dr\") pod \"node-resolver-cz2dx\" (UID: \"afc0240e-109a-48e1-b0fe-4ca9386fce91\") " pod="openshift-dns/node-resolver-cz2dx" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.400846 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.424394 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwnrx\" (UniqueName: \"kubernetes.io/projected/cf61c63b-b06c-4f51-add2-aefe57de751a-kube-api-access-bwnrx\") pod \"machine-config-daemon-h6mx2\" (UID: \"cf61c63b-b06c-4f51-add2-aefe57de751a\") " pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" Dec 11 21:48:48 crc 
kubenswrapper[4956]: I1211 21:48:48.425030 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bn6dr\" (UniqueName: \"kubernetes.io/projected/afc0240e-109a-48e1-b0fe-4ca9386fce91-kube-api-access-bn6dr\") pod \"node-resolver-cz2dx\" (UID: \"afc0240e-109a-48e1-b0fe-4ca9386fce91\") " pod="openshift-dns/node-resolver-cz2dx" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.431290 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.444763 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.461262 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserve
r-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.475141 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.492044 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.507214 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.522602 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.533742 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.539921 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-cz2dx" Dec 11 21:48:48 crc kubenswrapper[4956]: W1211 21:48:48.551823 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcf61c63b_b06c_4f51_add2_aefe57de751a.slice/crio-7c2d4cf8727000a07995bbfc258cc735715b0956137cb273febf0bde12d65d15 WatchSource:0}: Error finding container 7c2d4cf8727000a07995bbfc258cc735715b0956137cb273febf0bde12d65d15: Status 404 returned error can't find the container with id 7c2d4cf8727000a07995bbfc258cc735715b0956137cb273febf0bde12d65d15 Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.560080 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.578823 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.597226 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.613476 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.658501 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.676970 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.689474 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:48:48 crc kubenswrapper[4956]: E1211 21:48:48.689617 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:48:50.689575269 +0000 UTC m=+23.133953419 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.715654 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.759697 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.766855 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.787067 4956 reflector.go:368] 
Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.790467 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.790579 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.790661 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.790738 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:48:48 crc kubenswrapper[4956]: E1211 21:48:48.790664 4956 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 21:48:48 crc kubenswrapper[4956]: E1211 21:48:48.790963 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 21:48:50.790947722 +0000 UTC m=+23.235325872 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 21:48:48 crc kubenswrapper[4956]: E1211 21:48:48.790751 4956 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 21:48:48 crc kubenswrapper[4956]: E1211 21:48:48.790758 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 21:48:48 crc kubenswrapper[4956]: E1211 21:48:48.791195 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 21:48:48 crc kubenswrapper[4956]: E1211 21:48:48.791212 4956 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:48:48 crc kubenswrapper[4956]: E1211 21:48:48.790836 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 21:48:48 crc kubenswrapper[4956]: E1211 21:48:48.791242 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 21:48:48 crc kubenswrapper[4956]: E1211 21:48:48.791255 4956 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:48:48 crc kubenswrapper[4956]: E1211 21:48:48.791153 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 21:48:50.791144377 +0000 UTC m=+23.235522527 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 21:48:48 crc kubenswrapper[4956]: E1211 21:48:48.791300 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 21:48:50.791284592 +0000 UTC m=+23.235662742 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:48:48 crc kubenswrapper[4956]: E1211 21:48:48.791310 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 21:48:50.791305502 +0000 UTC m=+23.235683652 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.827261 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.846686 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.866507 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.894130 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.907170 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.955962 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:48 crc kubenswrapper[4956]: I1211 21:48:48.993950 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.020491 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.020523 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.020491 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:48:49 crc kubenswrapper[4956]: E1211 21:48:49.020681 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:48:49 crc kubenswrapper[4956]: E1211 21:48:49.020903 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:48:49 crc kubenswrapper[4956]: E1211 21:48:49.021079 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.040721 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:49Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.046746 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.085960 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.126715 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\
\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 
1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:49Z is after 2025-08-24T17:21:41Z"
Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.135886    4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.145869    4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls"
Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.210012    4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.237709    4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.240651    4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" event={"ID":"cf61c63b-b06c-4f51-add2-aefe57de751a","Type":"ContainerStarted","Data":"1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a"}
Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.240692    4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" event={"ID":"cf61c63b-b06c-4f51-add2-aefe57de751a","Type":"ContainerStarted","Data":"14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d"}
Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.240701    4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" event={"ID":"cf61c63b-b06c-4f51-add2-aefe57de751a","Type":"ContainerStarted","Data":"7c2d4cf8727000a07995bbfc258cc735715b0956137cb273febf0bde12d65d15"}
Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.242040    4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-p8slf" event={"ID":"3f5c3105-d748-4563-b3f7-a566d31a3031","Type":"ContainerStarted","Data":"29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c"}
Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.242075    4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-p8slf" event={"ID":"3f5c3105-d748-4563-b3f7-a566d31a3031","Type":"ContainerStarted","Data":"dee3ad607aa478703992e9eb96bca8ab34b58359469568d6c426c812413aabb4"}
Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.243107    4956 generic.go:334] "Generic (PLEG): container finished" podID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerID="dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb" exitCode=0
Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.243177    4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" event={"ID":"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30","Type":"ContainerDied","Data":"dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb"}
Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.243355    4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" event={"ID":"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30","Type":"ContainerStarted","Data":"f9b036d90f9d8482bffd0bf02cb9cd9693e51f4eddce514aba0fbceb67a7dad4"}
Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.244913    4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" event={"ID":"59601647-5a77-4d78-9821-73873f2cec46","Type":"ContainerStarted","Data":"e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2"}
Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.244969    4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" event={"ID":"59601647-5a77-4d78-9821-73873f2cec46","Type":"ContainerStarted","Data":"2f992e75b9fac7c544828d16fe79148c7594db6138e96dcc0bffcc3e7b06c62e"}
Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.246574    4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-cz2dx" event={"ID":"afc0240e-109a-48e1-b0fe-4ca9386fce91","Type":"ContainerStarted","Data":"e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed"}
Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.246601    4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-dns/node-resolver-cz2dx" event={"ID":"afc0240e-109a-48e1-b0fe-4ca9386fce91","Type":"ContainerStarted","Data":"645bde0405b17f3d83a6842ef60f554d1f51a5524cbc3d0745656703a41cc2e0"} Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.256789 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:49Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.279746 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:49Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.285877 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.335413 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:49Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.376087 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:49Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.413841 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:49Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.426234 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.481518 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/o
cp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:49Z is after 2025-08-24T17:21:41Z"
Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.486663    4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.507498    4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.527698    4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.577174    4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.578047    4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:49Z is after 2025-08-24T17:21:41Z"
Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.581294    4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.595428    4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"]
Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.632965    4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732
57453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:49Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.681177 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:49Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.716942 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:49Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.759046 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:49Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.799542 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:49Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.836825 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:49Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.879609 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:49Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.921315 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:49Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.957850 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k
8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:49Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:49 crc kubenswrapper[4956]: I1211 21:48:49.995843 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:49Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.037169 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.076188 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.115399 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.154223 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.205492 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.232665 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.251445 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" event={"ID":"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30","Type":"ContainerStarted","Data":"dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd"} Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.251509 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" event={"ID":"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30","Type":"ContainerStarted","Data":"42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292"} Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.251521 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" event={"ID":"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30","Type":"ContainerStarted","Data":"4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8"} Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.251530 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" event={"ID":"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30","Type":"ContainerStarted","Data":"71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a"} Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.253251 4956 generic.go:334] "Generic (PLEG): container finished" podID="59601647-5a77-4d78-9821-73873f2cec46" containerID="e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2" exitCode=0 Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.253653 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" event={"ID":"59601647-5a77-4d78-9821-73873f2cec46","Type":"ContainerDied","Data":"e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2"} Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.276144 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.323012 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.368428 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.396745 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.435864 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\
\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.475718 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.523381 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:50 crc 
kubenswrapper[4956]: I1211 21:48:50.535712 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-7q7lq"] Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.536139 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-7q7lq" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.564735 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Dis
abled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"en
v-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 2025-08-24T17:21:41Z"
Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.567219 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.587716 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt"
Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.607005 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p"
Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.628416 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt"
Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.673463 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 2025-08-24T17:21:41Z"
Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.707097 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.707213 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d48b9a21-a626-4b43-9429-59287cc38e3d-host\") pod \"node-ca-7q7lq\" (UID: \"d48b9a21-a626-4b43-9429-59287cc38e3d\") " pod="openshift-image-registry/node-ca-7q7lq"
Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.707283 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/d48b9a21-a626-4b43-9429-59287cc38e3d-serviceca\") pod \"node-ca-7q7lq\" (UID: \"d48b9a21-a626-4b43-9429-59287cc38e3d\") " pod="openshift-image-registry/node-ca-7q7lq"
Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.707305 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nh7lf\" (UniqueName: \"kubernetes.io/projected/d48b9a21-a626-4b43-9429-59287cc38e3d-kube-api-access-nh7lf\") pod \"node-ca-7q7lq\" (UID: \"d48b9a21-a626-4b43-9429-59287cc38e3d\") " pod="openshift-image-registry/node-ca-7q7lq"
Dec 11 21:48:50 crc kubenswrapper[4956]: E1211 21:48:50.707384 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:48:54.707369491 +0000 UTC m=+27.151747641 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.714786 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.753846 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.793415 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.796450 4956 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.798093 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.798123 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.798135 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.798228 4956 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.807730 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nh7lf\" (UniqueName: \"kubernetes.io/projected/d48b9a21-a626-4b43-9429-59287cc38e3d-kube-api-access-nh7lf\") pod \"node-ca-7q7lq\" (UID: \"d48b9a21-a626-4b43-9429-59287cc38e3d\") " pod="openshift-image-registry/node-ca-7q7lq" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.807790 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d48b9a21-a626-4b43-9429-59287cc38e3d-host\") pod \"node-ca-7q7lq\" (UID: \"d48b9a21-a626-4b43-9429-59287cc38e3d\") " pod="openshift-image-registry/node-ca-7q7lq" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.807820 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.807844 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.807868 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.807889 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 
21:48:50.807914 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/d48b9a21-a626-4b43-9429-59287cc38e3d-serviceca\") pod \"node-ca-7q7lq\" (UID: \"d48b9a21-a626-4b43-9429-59287cc38e3d\") " pod="openshift-image-registry/node-ca-7q7lq" Dec 11 21:48:50 crc kubenswrapper[4956]: E1211 21:48:50.808007 4956 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 21:48:50 crc kubenswrapper[4956]: E1211 21:48:50.808124 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 21:48:54.808104617 +0000 UTC m=+27.252482767 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 21:48:50 crc kubenswrapper[4956]: E1211 21:48:50.808212 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 21:48:50 crc kubenswrapper[4956]: E1211 21:48:50.808238 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 21:48:50 crc kubenswrapper[4956]: E1211 21:48:50.808249 4956 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:48:50 crc kubenswrapper[4956]: E1211 21:48:50.808292 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 21:48:54.808278222 +0000 UTC m=+27.252656372 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:48:50 crc kubenswrapper[4956]: E1211 21:48:50.808330 4956 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 21:48:50 crc kubenswrapper[4956]: E1211 21:48:50.808353 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-12-11 21:48:54.808346743 +0000 UTC m=+27.252724893 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.808378 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d48b9a21-a626-4b43-9429-59287cc38e3d-host\") pod \"node-ca-7q7lq\" (UID: \"d48b9a21-a626-4b43-9429-59287cc38e3d\") " pod="openshift-image-registry/node-ca-7q7lq" Dec 11 21:48:50 crc kubenswrapper[4956]: E1211 21:48:50.808453 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 21:48:50 crc kubenswrapper[4956]: E1211 21:48:50.808475 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 21:48:50 crc kubenswrapper[4956]: E1211 21:48:50.808486 4956 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:48:50 crc kubenswrapper[4956]: E1211 21:48:50.808518 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 21:48:54.808507727 +0000 UTC m=+27.252885927 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.809347 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/d48b9a21-a626-4b43-9429-59287cc38e3d-serviceca\") pod \"node-ca-7q7lq\" (UID: \"d48b9a21-a626-4b43-9429-59287cc38e3d\") " pod="openshift-image-registry/node-ca-7q7lq" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.855372 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.889575 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nh7lf\" (UniqueName: \"kubernetes.io/projected/d48b9a21-a626-4b43-9429-59287cc38e3d-kube-api-access-nh7lf\") pod \"node-ca-7q7lq\" (UID: \"d48b9a21-a626-4b43-9429-59287cc38e3d\") " pod="openshift-image-registry/node-ca-7q7lq" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.892926 4956 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.893101 4956 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.894340 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:50 crc 
kubenswrapper[4956]: I1211 21:48:50.894371 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.894379 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.894393 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.894402 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:50Z","lastTransitionTime":"2025-12-11T21:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:50 crc kubenswrapper[4956]: E1211 21:48:50.918534 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 
2025-08-24T17:21:41Z" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.921674 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.921724 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.921740 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.921758 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.921787 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:50Z","lastTransitionTime":"2025-12-11T21:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:50 crc kubenswrapper[4956]: E1211 21:48:50.934953 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 
2025-08-24T17:21:41Z" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.935179 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/k
ubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.938042 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.938198 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.938288 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.938375 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.938456 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:50Z","lastTransitionTime":"2025-12-11T21:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:50 crc kubenswrapper[4956]: E1211 21:48:50.955705 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 
2025-08-24T17:21:41Z" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.959094 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.959138 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.959151 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.959168 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.959180 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:50Z","lastTransitionTime":"2025-12-11T21:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:50 crc kubenswrapper[4956]: E1211 21:48:50.972947 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 
2025-08-24T17:21:41Z" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.976695 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.976741 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.976753 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.976789 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.976811 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:50Z","lastTransitionTime":"2025-12-11T21:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.979793 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:50 
crc kubenswrapper[4956]: E1211 21:48:50.989170 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [... duplicate of the node status patch logged at 21:48:50.955705 above ...] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:50Z is after 2025-08-24T17:21:41Z"
Dec 11 21:48:50 crc kubenswrapper[4956]: E1211 21:48:50.989288 4956 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.991169 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.991208 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.991219 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.991236 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:48:50 crc kubenswrapper[4956]: I1211 21:48:50.991247 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:50Z","lastTransitionTime":"2025-12-11T21:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.016875 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:51Z is after 2025-08-24T17:21:41Z"
Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.021013 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.021043 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.021042 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 11 21:48:51 crc kubenswrapper[4956]: E1211 21:48:51.021155 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 11 21:48:51 crc kubenswrapper[4956]: E1211 21:48:51.021255 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 11 21:48:51 crc kubenswrapper[4956]: E1211 21:48:51.021342 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.052369 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:51Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.092994 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.093031 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.093042 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.093056 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.093065 
4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:51Z","lastTransitionTime":"2025-12-11T21:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.100706 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:51Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.134284 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:51Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.158868 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-7q7lq" Dec 11 21:48:51 crc kubenswrapper[4956]: W1211 21:48:51.170822 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd48b9a21_a626_4b43_9429_59287cc38e3d.slice/crio-261f6f21794196907c853a51060f88acc87a637dca9db6f0d82fc8195bb4ebb5 WatchSource:0}: Error finding container 261f6f21794196907c853a51060f88acc87a637dca9db6f0d82fc8195bb4ebb5: Status 404 returned error can't find the container with id 261f6f21794196907c853a51060f88acc87a637dca9db6f0d82fc8195bb4ebb5 Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.176374 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:51Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.195815 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.195857 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.195871 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.195897 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.195909 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:51Z","lastTransitionTime":"2025-12-11T21:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.217797 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets
/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\
\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:51Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.258086 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197"} Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.260198 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:51Z 
is after 2025-08-24T17:21:41Z" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.261002 4956 generic.go:334] "Generic (PLEG): container finished" podID="59601647-5a77-4d78-9821-73873f2cec46" containerID="06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9" exitCode=0 Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.261051 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" event={"ID":"59601647-5a77-4d78-9821-73873f2cec46","Type":"ContainerDied","Data":"06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9"} Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.265870 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" event={"ID":"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30","Type":"ContainerStarted","Data":"5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0"} Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.265906 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" event={"ID":"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30","Type":"ContainerStarted","Data":"638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b"} Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.267405 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-7q7lq" event={"ID":"d48b9a21-a626-4b43-9429-59287cc38e3d","Type":"ContainerStarted","Data":"261f6f21794196907c853a51060f88acc87a637dca9db6f0d82fc8195bb4ebb5"} Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.293578 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:51Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.298219 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.298249 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.298260 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.298272 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.298281 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:51Z","lastTransitionTime":"2025-12-11T21:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.334601 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:51Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.373570 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:51Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.400045 4956 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.400119 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.400136 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.400150 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.400159 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:51Z","lastTransitionTime":"2025-12-11T21:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.412446 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:51Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.459414 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e2
7753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:51Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.495275 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:51Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.502654 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.502697 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.502709 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.502724 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.502737 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:51Z","lastTransitionTime":"2025-12-11T21:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.535344 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:51Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.573843 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:51Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.605311 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.605351 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.605361 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.605375 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.605385 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:51Z","lastTransitionTime":"2025-12-11T21:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.616432 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-
apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:51Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.654575 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:51Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.694440 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:51Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.711118 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.711203 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.711226 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.711256 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.711282 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:51Z","lastTransitionTime":"2025-12-11T21:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.734012 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:51Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.773480 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:51Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.813485 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.813536 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.813545 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.813559 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.813568 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:51Z","lastTransitionTime":"2025-12-11T21:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.814920 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:51Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.856234 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:51Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.892751 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:51Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.915714 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.915793 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.915810 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.915830 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.915844 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:51Z","lastTransitionTime":"2025-12-11T21:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.934576 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:51Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:51 crc kubenswrapper[4956]: I1211 21:48:51.974358 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:51Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.015423 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/r
un/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.017746 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.017790 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.017799 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.017813 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.017823 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:52Z","lastTransitionTime":"2025-12-11T21:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.062171 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c
56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.094794 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/n
et.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.120276 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.120342 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.120354 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.120372 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.120383 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:52Z","lastTransitionTime":"2025-12-11T21:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.223536 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.223581 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.223592 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.223607 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.223617 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:52Z","lastTransitionTime":"2025-12-11T21:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.271626 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-7q7lq" event={"ID":"d48b9a21-a626-4b43-9429-59287cc38e3d","Type":"ContainerStarted","Data":"ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f"} Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.274275 4956 generic.go:334] "Generic (PLEG): container finished" podID="59601647-5a77-4d78-9821-73873f2cec46" containerID="8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb" exitCode=0 Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.274331 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" event={"ID":"59601647-5a77-4d78-9821-73873f2cec46","Type":"ContainerDied","Data":"8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb"} Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.288072 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.301941 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-c
onfig-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.314552 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.333931 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e2
7753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.335899 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.335930 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.335941 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.335958 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.335970 4956 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:52Z","lastTransitionTime":"2025-12-11T21:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.346993 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.361272 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.375174 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.412328 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.437874 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.438116 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.438124 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.438137 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.438146 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:52Z","lastTransitionTime":"2025-12-11T21:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.453048 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.494947 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:52 crc 
kubenswrapper[4956]: I1211 21:48:52.534194 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.540579 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.540613 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.540622 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.540637 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.540647 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:52Z","lastTransitionTime":"2025-12-11T21:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.574957 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\
\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.619858 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:52Z 
is after 2025-08-24T17:21:41Z" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.642999 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.643046 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.643098 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.643124 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.643136 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:52Z","lastTransitionTime":"2025-12-11T21:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.656357 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-
cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.698294 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.740257 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.744815 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.744861 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.744872 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.744890 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.744904 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:52Z","lastTransitionTime":"2025-12-11T21:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.778963 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c
56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.817122 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/n
et.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.847519 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.847566 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.847577 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.847595 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.847608 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:52Z","lastTransitionTime":"2025-12-11T21:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.856911 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.895286 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.950278 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.950322 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.950331 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.950349 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.950360 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:52Z","lastTransitionTime":"2025-12-11T21:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.954991 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:52 crc kubenswrapper[4956]: I1211 21:48:52.974704 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.013019 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:53Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.020140 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.020190 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:48:53 crc kubenswrapper[4956]: E1211 21:48:53.020260 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.020309 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:48:53 crc kubenswrapper[4956]: E1211 21:48:53.020479 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:48:53 crc kubenswrapper[4956]: E1211 21:48:53.020592 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.052145 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.052193 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.052204 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.052221 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.052233 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:53Z","lastTransitionTime":"2025-12-11T21:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.056641 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"20
25-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:53Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.098412 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-
kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:53Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.136213 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:53Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.154258 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.154294 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.154302 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.154318 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.154327 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:53Z","lastTransitionTime":"2025-12-11T21:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.256394 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.256435 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.256449 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.256464 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.256474 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:53Z","lastTransitionTime":"2025-12-11T21:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.358218 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.358246 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.358255 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.358267 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.358275 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:53Z","lastTransitionTime":"2025-12-11T21:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.446242 4956 generic.go:334] "Generic (PLEG): container finished" podID="59601647-5a77-4d78-9821-73873f2cec46" containerID="7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554" exitCode=0 Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.446341 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" event={"ID":"59601647-5a77-4d78-9821-73873f2cec46","Type":"ContainerDied","Data":"7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554"} Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.450220 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:53Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.452363 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" 
event={"ID":"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30","Type":"ContainerStarted","Data":"25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f"} Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.460426 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.460479 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.460491 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.460509 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.460520 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:53Z","lastTransitionTime":"2025-12-11T21:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.463907 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is 
not yet valid: current time 2025-12-11T21:48:53Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.479883 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:53Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.493339 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers 
with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:53Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.510310 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:53Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.521527 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\
":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:53Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.543315 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:
29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 
21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:53Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.559445 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:53Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.564258 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.564332 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.564374 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.564394 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.564406 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:53Z","lastTransitionTime":"2025-12-11T21:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.578487 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:53Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.589349 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:53Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.598221 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:53Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.615023 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:53Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.655517 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:53Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.666356 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.666411 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.666421 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.666435 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.666445 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:53Z","lastTransitionTime":"2025-12-11T21:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.694317 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:53Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.736268 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:53Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.769595 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.769635 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.769662 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.769677 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.769686 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:53Z","lastTransitionTime":"2025-12-11T21:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.779039 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c
56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:53Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.872462 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.872537 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.872572 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.872591 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.872606 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:53Z","lastTransitionTime":"2025-12-11T21:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.974587 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.974648 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.974659 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.974674 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:53 crc kubenswrapper[4956]: I1211 21:48:53.974703 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:53Z","lastTransitionTime":"2025-12-11T21:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.078046 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.078076 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.078087 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.078102 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.078113 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:54Z","lastTransitionTime":"2025-12-11T21:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.181274 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.181365 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.181390 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.181413 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.181431 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:54Z","lastTransitionTime":"2025-12-11T21:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.283722 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.283834 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.283867 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.283899 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.283923 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:54Z","lastTransitionTime":"2025-12-11T21:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.386473 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.386527 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.386542 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.386562 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.386577 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:54Z","lastTransitionTime":"2025-12-11T21:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.460391 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" event={"ID":"59601647-5a77-4d78-9821-73873f2cec46","Type":"ContainerStarted","Data":"3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22"} Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.473943 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:54Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.489086 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:54Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.489506 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.489524 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.489534 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.489546 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.489555 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:54Z","lastTransitionTime":"2025-12-11T21:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.501110 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:54Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.517733 4956 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:54Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.539801 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48
:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:54Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.563408 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:54Z 
is after 2025-08-24T17:21:41Z" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.582357 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:54Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.592572 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.592625 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.592641 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.592662 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.592677 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:54Z","lastTransitionTime":"2025-12-11T21:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.596622 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:54Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.610527 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:54Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.625940 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:54Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.641293 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:54Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.653575 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:54Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.667730 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:54Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.679084 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:54Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.695011 4956 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.695065 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.695076 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.695093 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.695105 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:54Z","lastTransitionTime":"2025-12-11T21:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.745297 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:48:54 crc kubenswrapper[4956]: E1211 21:48:54.745699 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:49:02.745576012 +0000 UTC m=+35.189954212 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.797521 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.797562 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.797570 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.797585 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.797595 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:54Z","lastTransitionTime":"2025-12-11T21:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.846673 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.846722 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.846748 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.846816 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:48:54 crc kubenswrapper[4956]: E1211 21:48:54.846896 4956 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 21:48:54 crc kubenswrapper[4956]: E1211 21:48:54.846914 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 21:48:54 crc kubenswrapper[4956]: E1211 21:48:54.846949 4956 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 21:48:54 crc kubenswrapper[4956]: E1211 21:48:54.846993 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 21:48:54 crc kubenswrapper[4956]: E1211 21:48:54.847032 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 21:48:54 crc kubenswrapper[4956]: E1211 21:48:54.847047 4956 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:48:54 crc kubenswrapper[4956]: E1211 21:48:54.846956 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf 
podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 21:49:02.846937045 +0000 UTC m=+35.291315195 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 21:48:54 crc kubenswrapper[4956]: E1211 21:48:54.846951 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 21:48:54 crc kubenswrapper[4956]: E1211 21:48:54.847158 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 21:49:02.84713852 +0000 UTC m=+35.291516680 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 21:48:54 crc kubenswrapper[4956]: E1211 21:48:54.847174 4956 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:48:54 crc kubenswrapper[4956]: E1211 21:48:54.847176 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 21:49:02.847168751 +0000 UTC m=+35.291546921 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:48:54 crc kubenswrapper[4956]: E1211 21:48:54.847221 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 21:49:02.847211072 +0000 UTC m=+35.291589282 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.900063 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.900358 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.900371 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.900390 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:54 crc kubenswrapper[4956]: I1211 21:48:54.900403 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:54Z","lastTransitionTime":"2025-12-11T21:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.002309 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.002358 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.002371 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.002391 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.002402 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:55Z","lastTransitionTime":"2025-12-11T21:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.021186 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.021230 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.021186 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:48:55 crc kubenswrapper[4956]: E1211 21:48:55.021328 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:48:55 crc kubenswrapper[4956]: E1211 21:48:55.021430 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:48:55 crc kubenswrapper[4956]: E1211 21:48:55.021502 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.104502 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.104553 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.104568 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.104586 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.104598 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:55Z","lastTransitionTime":"2025-12-11T21:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.206513 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.206545 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.206557 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.206573 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.206584 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:55Z","lastTransitionTime":"2025-12-11T21:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.308970 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.309018 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.309026 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.309041 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.309050 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:55Z","lastTransitionTime":"2025-12-11T21:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.411889 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.411932 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.411947 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.411963 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.411972 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:55Z","lastTransitionTime":"2025-12-11T21:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.469542 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" event={"ID":"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30","Type":"ContainerStarted","Data":"c7885bb4dbe14fcf1c4f26f809fe65e9427a17a93a7db9ba70722830bbcd097a"} Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.469915 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.473743 4956 generic.go:334] "Generic (PLEG): container finished" podID="59601647-5a77-4d78-9821-73873f2cec46" containerID="3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22" exitCode=0 Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.473801 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" event={"ID":"59601647-5a77-4d78-9821-73873f2cec46","Type":"ContainerDied","Data":"3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22"} Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.490119 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.504908 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.513997 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.514024 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.514036 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.514051 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.514062 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:55Z","lastTransitionTime":"2025-12-11T21:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.515955 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.518112 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.526699 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.538860 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.555437 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z"
Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.569270 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z"
Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.581522 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z"
Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.591717 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z"
Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.604401 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z"
Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.616613 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.616647 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.616658 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.616694 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.616705 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:55Z","lastTransitionTime":"2025-12-11T21:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.616737 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z"
Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.630714 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z"
Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.645880 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z"
Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.664593 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7885bb4dbe14fcf1c4f26f809fe65e9427a17a93a7db9ba70722830bbcd097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z"
Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.677296 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z"
Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.689071 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z"
Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.701733 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z"
Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.712744 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z"
Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.719258 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.719303 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.719312 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.719325 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.719334 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:55Z","lastTransitionTime":"2025-12-11T21:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.724379 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z"
Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.735676 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.746466 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.755853 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.769544 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.821457 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.821561 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.821583 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.821591 4956 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.821605 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.821614 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:55Z","lastTransitionTime":"2025-12-11T21:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.838688 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.866199 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7885bb4dbe14fcf1c4f26f809fe65e9427a17a9
3a7db9ba70722830bbcd097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.883654 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.895830 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:55Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.923703 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.923741 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.923752 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.923790 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:55 crc kubenswrapper[4956]: I1211 21:48:55.923803 4956 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:55Z","lastTransitionTime":"2025-12-11T21:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.026507 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.026616 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.026658 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.026693 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.026715 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:56Z","lastTransitionTime":"2025-12-11T21:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.129939 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.129990 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.130003 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.130020 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.130032 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:56Z","lastTransitionTime":"2025-12-11T21:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.232551 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.232597 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.232607 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.232622 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.232632 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:56Z","lastTransitionTime":"2025-12-11T21:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.335441 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.335501 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.335544 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.335586 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.335621 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:56Z","lastTransitionTime":"2025-12-11T21:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.439197 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.439248 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.439259 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.439277 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.439290 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:56Z","lastTransitionTime":"2025-12-11T21:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.476584 4956 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.477089 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.510708 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.524408 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:56Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.541492 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.541573 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 
11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.541619 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.541647 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.541665 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:56Z","lastTransitionTime":"2025-12-11T21:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.544420 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:56Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.564375 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:56Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.582107 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:56Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.600053 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:56Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.612823 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:56Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.627456 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:56Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.640411 4956 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:56Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.643845 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.643884 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.643897 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.643913 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.643925 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:56Z","lastTransitionTime":"2025-12-11T21:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.654284 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:56Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.667850 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:56Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.679973 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:56Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.696227 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:56Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.713944 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7885bb4dbe14fcf1c4f26f809fe65e9427a17a9
3a7db9ba70722830bbcd097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:56Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.724930 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:56Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.747378 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.747423 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.747434 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.747452 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.747464 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:56Z","lastTransitionTime":"2025-12-11T21:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.850156 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.850222 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.850235 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.850252 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.850263 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:56Z","lastTransitionTime":"2025-12-11T21:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.953636 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.953699 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.953717 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.953739 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:56 crc kubenswrapper[4956]: I1211 21:48:56.953756 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:56Z","lastTransitionTime":"2025-12-11T21:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.020186 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.020199 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:48:57 crc kubenswrapper[4956]: E1211 21:48:57.020624 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.020206 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:48:57 crc kubenswrapper[4956]: E1211 21:48:57.020763 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:48:57 crc kubenswrapper[4956]: E1211 21:48:57.020833 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.056589 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.056674 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.056690 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.056712 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.056724 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:57Z","lastTransitionTime":"2025-12-11T21:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.159464 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.159516 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.159526 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.159541 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.159551 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:57Z","lastTransitionTime":"2025-12-11T21:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.262153 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.262206 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.262219 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.262238 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.262253 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:57Z","lastTransitionTime":"2025-12-11T21:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.364998 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.365061 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.365073 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.365095 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.365107 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:57Z","lastTransitionTime":"2025-12-11T21:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.467619 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.467663 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.467671 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.467686 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.467694 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:57Z","lastTransitionTime":"2025-12-11T21:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.484130 4956 generic.go:334] "Generic (PLEG): container finished" podID="59601647-5a77-4d78-9821-73873f2cec46" containerID="5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579" exitCode=0 Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.484332 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" event={"ID":"59601647-5a77-4d78-9821-73873f2cec46","Type":"ContainerDied","Data":"5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579"} Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.484619 4956 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.501760 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\
\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.512885 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:57Z is after 2025-08-24T17:21:41Z"
Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.526886 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.548481 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7885bb4dbe14fcf1c4f26f809fe65e9427a17a9
3a7db9ba70722830bbcd097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.563652 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.569383 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.569414 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.569426 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.569443 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.569456 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:57Z","lastTransitionTime":"2025-12-11T21:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.574903 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.585812 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.597758 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.610953 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.623857 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.635623 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.645446 4956 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.661830 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.672390 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.673399 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.673468 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.673482 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.673496 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.673507 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:57Z","lastTransitionTime":"2025-12-11T21:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.776563 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.776613 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.776623 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.776638 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.776652 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:57Z","lastTransitionTime":"2025-12-11T21:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.879172 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.879219 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.879231 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.879249 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.879261 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:57Z","lastTransitionTime":"2025-12-11T21:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.982124 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.982159 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.982169 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.982205 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:57 crc kubenswrapper[4956]: I1211 21:48:57.982217 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:57Z","lastTransitionTime":"2025-12-11T21:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.038568 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.057463 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.070725 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.083592 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.083646 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.083662 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.083683 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.083698 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:58Z","lastTransitionTime":"2025-12-11T21:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.086596 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.099803 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.113118 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.123952 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.139134 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.151235 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.170234 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.186655 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.186694 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.186702 4956 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.186714 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.186723 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:58Z","lastTransitionTime":"2025-12-11T21:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.187369 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.200939 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.222137 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7885bb4dbe14fcf1c4f26f809fe65e9427a17a9
3a7db9ba70722830bbcd097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.234707 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.288696 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.288749 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.288761 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.288824 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.288837 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:58Z","lastTransitionTime":"2025-12-11T21:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.392610 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.392673 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.392698 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.392728 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.392750 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:58Z","lastTransitionTime":"2025-12-11T21:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.494852 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.494915 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.494934 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.494959 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.494977 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:58Z","lastTransitionTime":"2025-12-11T21:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.496232 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" event={"ID":"59601647-5a77-4d78-9821-73873f2cec46","Type":"ContainerStarted","Data":"0c1dcc64c988fc4c0aa012688bd5870f4e8c0b6198e6587dad96d118c617900e"} Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.496263 4956 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.509037 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.525791 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c1dcc64c988fc4c0aa012688bd5870f4e8c0b6198e6587dad96d118c617900e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.544571 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7885bb4dbe14fcf1c4f26f809fe65e9427a17a93a7db9ba70722830bbcd097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.557017 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.569518 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.581398 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.592035 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z"
Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.597541 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.597575 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.597583 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.597596 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.597606 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:58Z","lastTransitionTime":"2025-12-11T21:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.602272 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z"
Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.611914 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.621639 4956 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.634745 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.647597 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.656320 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.668199 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:48:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.700360 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.700400 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.700410 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.700428 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:58 crc kubenswrapper[4956]: 
I1211 21:48:58.700439 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:58Z","lastTransitionTime":"2025-12-11T21:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.803254 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.803301 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.803311 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.803337 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.803351 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:58Z","lastTransitionTime":"2025-12-11T21:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.905946 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.906003 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.906012 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.906026 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:58 crc kubenswrapper[4956]: I1211 21:48:58.906036 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:58Z","lastTransitionTime":"2025-12-11T21:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.009062 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.009096 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.009122 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.009135 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.009143 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:59Z","lastTransitionTime":"2025-12-11T21:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.020501 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.020556 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:48:59 crc kubenswrapper[4956]: E1211 21:48:59.020596 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:48:59 crc kubenswrapper[4956]: E1211 21:48:59.020724 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.020901 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:48:59 crc kubenswrapper[4956]: E1211 21:48:59.021045 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.142380 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.142422 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.142433 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.142451 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.142465 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:59Z","lastTransitionTime":"2025-12-11T21:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.244496 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.244557 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.244573 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.244596 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.244613 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:59Z","lastTransitionTime":"2025-12-11T21:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.347907 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.347959 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.347968 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.347985 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.348001 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:59Z","lastTransitionTime":"2025-12-11T21:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.450297 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.450339 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.450352 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.450368 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.450382 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:59Z","lastTransitionTime":"2025-12-11T21:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.553332 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.553404 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.553427 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.553455 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.553477 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:59Z","lastTransitionTime":"2025-12-11T21:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.655513 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.655572 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.655594 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.655623 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.655643 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:59Z","lastTransitionTime":"2025-12-11T21:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.758551 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.758600 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.758610 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.758624 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.758633 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:59Z","lastTransitionTime":"2025-12-11T21:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.861181 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.861217 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.861226 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.861239 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.861248 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:59Z","lastTransitionTime":"2025-12-11T21:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.964545 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.964613 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.964631 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.964658 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:48:59 crc kubenswrapper[4956]: I1211 21:48:59.964675 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:48:59Z","lastTransitionTime":"2025-12-11T21:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.067616 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.067665 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.067691 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.067714 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.067730 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:00Z","lastTransitionTime":"2025-12-11T21:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.171389 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.171431 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.171440 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.171456 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.171465 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:00Z","lastTransitionTime":"2025-12-11T21:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.260124 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5"] Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.260740 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.263838 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.264131 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.274192 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.274610 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.274630 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.274652 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.274668 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:00Z","lastTransitionTime":"2025-12-11T21:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.288402 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.308975 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c1dcc64c988fc4c0aa012688bd5870f4e8c0b6198e6587dad96d118c617900e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.334651 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7885bb4dbe14fcf1c4f26f809fe65e9427a17a9
3a7db9ba70722830bbcd097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.353543 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.373457 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.377337 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.377368 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.377376 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.377396 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.377419 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:00Z","lastTransitionTime":"2025-12-11T21:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.391971 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.407226 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d1e32015-9a51-44d0-be08-ecb4f246ddd1-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-vzpq5\" (UID: \"d1e32015-9a51-44d0-be08-ecb4f246ddd1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.407319 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vv6zh\" (UniqueName: \"kubernetes.io/projected/d1e32015-9a51-44d0-be08-ecb4f246ddd1-kube-api-access-vv6zh\") pod \"ovnkube-control-plane-749d76644c-vzpq5\" (UID: \"d1e32015-9a51-44d0-be08-ecb4f246ddd1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.407382 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/d1e32015-9a51-44d0-be08-ecb4f246ddd1-env-overrides\") pod \"ovnkube-control-plane-749d76644c-vzpq5\" (UID: \"d1e32015-9a51-44d0-be08-ecb4f246ddd1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.407642 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d1e32015-9a51-44d0-be08-ecb4f246ddd1-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-vzpq5\" (UID: \"d1e32015-9a51-44d0-be08-ecb4f246ddd1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.408130 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.427452 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.441122 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.454945 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.470442 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e2
7753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.479567 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.479621 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.479634 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.479677 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.479701 4956 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:00Z","lastTransitionTime":"2025-12-11T21:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.486139 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1e32015-9a51-44d0-be08-ecb4f246ddd1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:00Z
\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vzpq5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.503215 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-v52ql_c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30/ovnkube-controller/0.log" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.507080 4956 generic.go:334] "Generic (PLEG): container finished" podID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerID="c7885bb4dbe14fcf1c4f26f809fe65e9427a17a93a7db9ba70722830bbcd097a" exitCode=1 Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.507163 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" event={"ID":"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30","Type":"ContainerDied","Data":"c7885bb4dbe14fcf1c4f26f809fe65e9427a17a93a7db9ba70722830bbcd097a"} Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.507374 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: 
I1211 21:49:00.508186 4956 scope.go:117] "RemoveContainer" containerID="c7885bb4dbe14fcf1c4f26f809fe65e9427a17a93a7db9ba70722830bbcd097a" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.508663 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d1e32015-9a51-44d0-be08-ecb4f246ddd1-env-overrides\") pod \"ovnkube-control-plane-749d76644c-vzpq5\" (UID: \"d1e32015-9a51-44d0-be08-ecb4f246ddd1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.508819 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d1e32015-9a51-44d0-be08-ecb4f246ddd1-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-vzpq5\" (UID: \"d1e32015-9a51-44d0-be08-ecb4f246ddd1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.508901 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d1e32015-9a51-44d0-be08-ecb4f246ddd1-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-vzpq5\" (UID: \"d1e32015-9a51-44d0-be08-ecb4f246ddd1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.508972 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vv6zh\" (UniqueName: \"kubernetes.io/projected/d1e32015-9a51-44d0-be08-ecb4f246ddd1-kube-api-access-vv6zh\") pod \"ovnkube-control-plane-749d76644c-vzpq5\" (UID: \"d1e32015-9a51-44d0-be08-ecb4f246ddd1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.509377 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d1e32015-9a51-44d0-be08-ecb4f246ddd1-env-overrides\") pod \"ovnkube-control-plane-749d76644c-vzpq5\" (UID: \"d1e32015-9a51-44d0-be08-ecb4f246ddd1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.510082 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d1e32015-9a51-44d0-be08-ecb4f246ddd1-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-vzpq5\" (UID: \"d1e32015-9a51-44d0-be08-ecb4f246ddd1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.519458 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d1e32015-9a51-44d0-be08-ecb4f246ddd1-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-vzpq5\" (UID: \"d1e32015-9a51-44d0-be08-ecb4f246ddd1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.531094 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.543019 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vv6zh\" (UniqueName: \"kubernetes.io/projected/d1e32015-9a51-44d0-be08-ecb4f246ddd1-kube-api-access-vv6zh\") pod \"ovnkube-control-plane-749d76644c-vzpq5\" (UID: \"d1e32015-9a51-44d0-be08-ecb4f246ddd1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.553151 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.570274 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.582163 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.583474 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.583524 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.583542 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.583565 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.583584 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:00Z","lastTransitionTime":"2025-12-11T21:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.588937 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c1dcc64c988fc4c0aa012688bd5870f4e8c0b6198e6587dad96d118c617900e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.610290 4956 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7885bb4dbe14fcf1c4f26f809fe65e9427a17a93a7db9ba70722830bbcd097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7885bb4dbe14fcf1c4f26f809fe65e9427a17a93a7db9ba70722830bbcd097a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI1211 21:48:59.340664 6188 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1211 21:48:59.340678 6188 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1211 21:48:59.340691 6188 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1211 21:48:59.340696 6188 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1211 21:48:59.340715 6188 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1211 21:48:59.340747 6188 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1211 21:48:59.340792 6188 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1211 21:48:59.340807 6188 factory.go:656] Stopping watch factory\\\\nI1211 21:48:59.340820 6188 ovnkube.go:599] Stopped ovnkube\\\\nI1211 21:48:59.340847 6188 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1211 21:48:59.340855 6188 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1211 21:48:59.340862 6188 handler.go:208] Removed *v1.Node event handler 2\\\\nI1211 21:48:59.340868 6188 handler.go:208] Removed *v1.Node event handler 7\\\\nI1211 21:48:59.340873 6188 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1211 21:48:59.340878 6188 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1211 
2\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209
9482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: W1211 21:49:00.611849 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1e32015_9a51_44d0_be08_ecb4f246ddd1.slice/crio-9806812d59e6832e20d9bc0501341e04ad5c23f0affddb5c763a034453bee47d WatchSource:0}: Error finding container 9806812d59e6832e20d9bc0501341e04ad5c23f0affddb5c763a034453bee47d: Status 404 returned error can't find the container with id 9806812d59e6832e20d9bc0501341e04ad5c23f0affddb5c763a034453bee47d Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.626896 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.639887 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.654201 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.666054 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.677717 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.686359 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.686391 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.686401 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.686414 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.686422 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:00Z","lastTransitionTime":"2025-12-11T21:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.690468 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.700898 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.711069 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.724013 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1e32015-9a51-44d0-be08-ecb4f246ddd1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vzpq5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.735039 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.744296 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.755469 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:00Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.788573 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.788612 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.788622 4956 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.788636 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.788646 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:00Z","lastTransitionTime":"2025-12-11T21:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.890965 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.890996 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.891009 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.891041 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.891053 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:00Z","lastTransitionTime":"2025-12-11T21:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.993815 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.993862 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.993872 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.993890 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:00 crc kubenswrapper[4956]: I1211 21:49:00.993901 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:00Z","lastTransitionTime":"2025-12-11T21:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.021261 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.021261 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:49:01 crc kubenswrapper[4956]: E1211 21:49:01.021404 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.021431 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:49:01 crc kubenswrapper[4956]: E1211 21:49:01.021520 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:49:01 crc kubenswrapper[4956]: E1211 21:49:01.021608 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.096325 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.096358 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.096367 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.096379 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.096388 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:01Z","lastTransitionTime":"2025-12-11T21:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.199609 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.199690 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.199714 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.199744 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.199797 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:01Z","lastTransitionTime":"2025-12-11T21:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.230487 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.230549 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.230567 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.230594 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.230612 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:01Z","lastTransitionTime":"2025-12-11T21:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:01 crc kubenswrapper[4956]: E1211 21:49:01.243667 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.248816 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.248867 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.286606 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.286620 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.286629 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:01Z","lastTransitionTime":"2025-12-11T21:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:01 crc kubenswrapper[4956]: E1211 21:49:01.300352 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.306189 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.306237 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.306252 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.306268 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.306279 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:01Z","lastTransitionTime":"2025-12-11T21:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:01 crc kubenswrapper[4956]: E1211 21:49:01.324523 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: E1211 21:49:01.324672 4956 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.326255 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.326288 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.326297 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.326311 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.326321 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:01Z","lastTransitionTime":"2025-12-11T21:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.428671 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.428725 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.428740 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.428760 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.428808 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:01Z","lastTransitionTime":"2025-12-11T21:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.513260 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" event={"ID":"d1e32015-9a51-44d0-be08-ecb4f246ddd1","Type":"ContainerStarted","Data":"5c52482e59cae165d4851f84d13879c90ac4312289b9513badd9254a90039d1a"} Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.513307 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" event={"ID":"d1e32015-9a51-44d0-be08-ecb4f246ddd1","Type":"ContainerStarted","Data":"7dc89fdda6e9653addc07f72bc62500419569ba7a115ea3396f1a07519dae349"} Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.513319 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" event={"ID":"d1e32015-9a51-44d0-be08-ecb4f246ddd1","Type":"ContainerStarted","Data":"9806812d59e6832e20d9bc0501341e04ad5c23f0affddb5c763a034453bee47d"} Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.516298 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-v52ql_c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30/ovnkube-controller/0.log" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.520714 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" event={"ID":"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30","Type":"ContainerStarted","Data":"08775b7554e61eb831a833a467dd43277127a364155e700ba9e23d1ecd100a10"} Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.520923 4956 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.533745 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.533806 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.533820 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.533837 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.533853 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:01Z","lastTransitionTime":"2025-12-11T21:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.535288 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.552201 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.568242 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c1dcc64c988fc4c0aa012688bd5870f4e8c0b6198e6587dad96d118c617900e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.584458 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7885bb4dbe14fcf1c4f26f809fe65e9427a17a93a7db9ba70722830bbcd097a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7885bb4dbe14fcf1c4f26f809fe65e9427a17a93a7db9ba70722830bbcd097a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI1211 21:48:59.340664 6188 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1211 21:48:59.340678 6188 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1211 21:48:59.340691 6188 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1211 21:48:59.340696 6188 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1211 21:48:59.340715 6188 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1211 21:48:59.340747 6188 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1211 21:48:59.340792 6188 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1211 21:48:59.340807 6188 factory.go:656] Stopping watch factory\\\\nI1211 21:48:59.340820 6188 ovnkube.go:599] Stopped ovnkube\\\\nI1211 21:48:59.340847 6188 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1211 21:48:59.340855 6188 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1211 21:48:59.340862 6188 handler.go:208] Removed *v1.Node event handler 2\\\\nI1211 21:48:59.340868 6188 handler.go:208] Removed *v1.Node event handler 7\\\\nI1211 21:48:59.340873 6188 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1211 21:48:59.340878 6188 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1211 
2\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209
9482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.596673 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cn
i/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.609029 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.624254 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.636638 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.636684 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.636696 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.636712 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.636723 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:01Z","lastTransitionTime":"2025-12-11T21:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.641240 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.659158 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.670386 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.681477 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.694953 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e2
7753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.705800 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1e32015-9a51-44d0-be08-ecb4f246ddd1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dc89fdda6e9653addc07f72bc62500419569ba7a115ea3396f1a07519dae349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c52482e59cae165d4851f84d13879c90ac4312289b9513badd9254a90039d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vzpq5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 
21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.719151 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.728852 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.738232 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-fgzkb"] Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.738731 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:01 crc kubenswrapper[4956]: E1211 21:49:01.738821 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.739209 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.739365 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.739388 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.739396 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.739408 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:01 crc kubenswrapper[4956]: 
I1211 21:49:01.739419 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:01Z","lastTransitionTime":"2025-12-11T21:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.749460 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.759644 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.771837 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c1dcc64c988fc4c0aa012688bd5870f4e8c0b6198e6587dad96d118c617900e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.789540 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08775b7554e61eb831a833a467dd43277127a364155e700ba9e23d1ecd100a10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7885bb4dbe14fcf1c4f26f809fe65e9427a17a93a7db9ba70722830bbcd097a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI1211 21:48:59.340664 6188 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1211 21:48:59.340678 6188 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1211 21:48:59.340691 6188 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1211 21:48:59.340696 6188 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1211 21:48:59.340715 6188 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1211 21:48:59.340747 6188 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1211 21:48:59.340792 6188 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1211 21:48:59.340807 6188 factory.go:656] Stopping watch factory\\\\nI1211 21:48:59.340820 6188 ovnkube.go:599] Stopped ovnkube\\\\nI1211 21:48:59.340847 6188 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1211 21:48:59.340855 6188 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1211 21:48:59.340862 6188 handler.go:208] Removed *v1.Node event handler 2\\\\nI1211 21:48:59.340868 6188 handler.go:208] Removed *v1.Node event handler 7\\\\nI1211 21:48:59.340873 6188 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1211 21:48:59.340878 6188 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1211 
2\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.806675 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.816911 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.826880 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.836265 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.841501 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.841549 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.841558 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.841571 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.841580 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:01Z","lastTransitionTime":"2025-12-11T21:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.846047 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.855468 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.864848 4956 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.878026 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.890451 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.899449 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1e32015-9a51-44d0-be08-ecb4f246ddd1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dc89fdda6e9653addc07f72bc62500419569ba7a115ea3396f1a07519dae349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c52482e59cae
165d4851f84d13879c90ac4312289b9513badd9254a90039d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vzpq5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.911724 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"sys
tem-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.922913 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.923073 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skp9h\" (UniqueName: \"kubernetes.io/projected/534554e4-788d-4649-9dfc-ab5fd83d37d9-kube-api-access-skp9h\") pod \"network-metrics-daemon-fgzkb\" (UID: \"534554e4-788d-4649-9dfc-ab5fd83d37d9\") " pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.923353 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/534554e4-788d-4649-9dfc-ab5fd83d37d9-metrics-certs\") pod \"network-metrics-daemon-fgzkb\" (UID: \"534554e4-788d-4649-9dfc-ab5fd83d37d9\") " pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.944464 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.944561 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.944580 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.944602 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.944617 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:01Z","lastTransitionTime":"2025-12-11T21:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.946725 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c1dcc64c988fc4c0aa012688bd5870f4e8c0b6198e6587dad96d118c617900e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.974307 4956 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08775b7554e61eb831a833a467dd43277127a364155e700ba9e23d1ecd100a10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7885bb4dbe14fcf1c4f26f809fe65e9427a17a93a7db9ba70722830bbcd097a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI1211 21:48:59.340664 6188 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1211 21:48:59.340678 6188 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1211 21:48:59.340691 6188 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1211 21:48:59.340696 6188 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1211 21:48:59.340715 6188 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1211 21:48:59.340747 6188 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1211 21:48:59.340792 6188 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1211 21:48:59.340807 6188 factory.go:656] Stopping watch factory\\\\nI1211 21:48:59.340820 6188 ovnkube.go:599] Stopped ovnkube\\\\nI1211 21:48:59.340847 6188 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1211 21:48:59.340855 6188 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1211 21:48:59.340862 6188 handler.go:208] Removed *v1.Node event handler 2\\\\nI1211 21:48:59.340868 6188 handler.go:208] Removed *v1.Node event handler 7\\\\nI1211 21:48:59.340873 6188 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1211 21:48:59.340878 6188 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1211 
2\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:01 crc kubenswrapper[4956]: I1211 21:49:01.992884 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:01Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.010191 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.023871 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/534554e4-788d-4649-9dfc-ab5fd83d37d9-metrics-certs\") pod \"network-metrics-daemon-fgzkb\" (UID: \"534554e4-788d-4649-9dfc-ab5fd83d37d9\") " pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.023920 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skp9h\" (UniqueName: \"kubernetes.io/projected/534554e4-788d-4649-9dfc-ab5fd83d37d9-kube-api-access-skp9h\") pod \"network-metrics-daemon-fgzkb\" (UID: \"534554e4-788d-4649-9dfc-ab5fd83d37d9\") " pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:02 crc kubenswrapper[4956]: E1211 21:49:02.024035 4956 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 21:49:02 crc kubenswrapper[4956]: E1211 21:49:02.024103 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/534554e4-788d-4649-9dfc-ab5fd83d37d9-metrics-certs podName:534554e4-788d-4649-9dfc-ab5fd83d37d9 nodeName:}" failed. No retries permitted until 2025-12-11 21:49:02.52408548 +0000 UTC m=+34.968463630 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/534554e4-788d-4649-9dfc-ab5fd83d37d9-metrics-certs") pod "network-metrics-daemon-fgzkb" (UID: "534554e4-788d-4649-9dfc-ab5fd83d37d9") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.027815 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21
:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.042344 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.044956 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skp9h\" (UniqueName: \"kubernetes.io/projected/534554e4-788d-4649-9dfc-ab5fd83d37d9-kube-api-access-skp9h\") pod \"network-metrics-daemon-fgzkb\" (UID: \"534554e4-788d-4649-9dfc-ab5fd83d37d9\") " pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.046567 4956 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.046623 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.046635 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.046651 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.046662 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:02Z","lastTransitionTime":"2025-12-11T21:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.058288 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.071937 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.085454 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.094828 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1e32015-9a51-44d0-be08-ecb4f246ddd1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dc89fdda6e9653addc07f72bc62500419569ba7a115ea3396f1a07519dae349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c52482e59cae165d4851f84d13879c90ac4312289b9513badd9254a90039d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:00Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vzpq5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.104826 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.115072 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.126859 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.139650 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fgzkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534554e4-788d-4649-9dfc-ab5fd83d37d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fgzkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.149674 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.149702 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.149711 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.149724 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.149733 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:02Z","lastTransitionTime":"2025-12-11T21:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.252113 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.252149 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.252157 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.252177 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.252188 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:02Z","lastTransitionTime":"2025-12-11T21:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.355187 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.355257 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.355275 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.355300 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.355318 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:02Z","lastTransitionTime":"2025-12-11T21:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.457947 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.458000 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.458010 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.458026 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.458038 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:02Z","lastTransitionTime":"2025-12-11T21:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.525136 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-v52ql_c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30/ovnkube-controller/1.log" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.525612 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-v52ql_c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30/ovnkube-controller/0.log" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.528981 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/534554e4-788d-4649-9dfc-ab5fd83d37d9-metrics-certs\") pod \"network-metrics-daemon-fgzkb\" (UID: \"534554e4-788d-4649-9dfc-ab5fd83d37d9\") " pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.529072 4956 generic.go:334] "Generic (PLEG): container finished" podID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerID="08775b7554e61eb831a833a467dd43277127a364155e700ba9e23d1ecd100a10" exitCode=1 Dec 11 21:49:02 crc kubenswrapper[4956]: E1211 21:49:02.529113 4956 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.529113 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" event={"ID":"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30","Type":"ContainerDied","Data":"08775b7554e61eb831a833a467dd43277127a364155e700ba9e23d1ecd100a10"} Dec 11 21:49:02 crc kubenswrapper[4956]: E1211 21:49:02.529166 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/534554e4-788d-4649-9dfc-ab5fd83d37d9-metrics-certs podName:534554e4-788d-4649-9dfc-ab5fd83d37d9 nodeName:}" failed. No retries permitted until 2025-12-11 21:49:03.529149248 +0000 UTC m=+35.973527408 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/534554e4-788d-4649-9dfc-ab5fd83d37d9-metrics-certs") pod "network-metrics-daemon-fgzkb" (UID: "534554e4-788d-4649-9dfc-ab5fd83d37d9") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.529210 4956 scope.go:117] "RemoveContainer" containerID="c7885bb4dbe14fcf1c4f26f809fe65e9427a17a93a7db9ba70722830bbcd097a" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.530033 4956 scope.go:117] "RemoveContainer" containerID="08775b7554e61eb831a833a467dd43277127a364155e700ba9e23d1ecd100a10" Dec 11 21:49:02 crc kubenswrapper[4956]: E1211 21:49:02.530191 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-v52ql_openshift-ovn-kubernetes(c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30)\"" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.551432 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08775b7554e61eb831a833a467dd43277127a364
155e700ba9e23d1ecd100a10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7885bb4dbe14fcf1c4f26f809fe65e9427a17a93a7db9ba70722830bbcd097a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI1211 21:48:59.340664 6188 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1211 21:48:59.340678 6188 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1211 21:48:59.340691 6188 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1211 21:48:59.340696 6188 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1211 21:48:59.340715 6188 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1211 21:48:59.340747 6188 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1211 21:48:59.340792 6188 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1211 21:48:59.340807 6188 factory.go:656] Stopping watch factory\\\\nI1211 21:48:59.340820 6188 ovnkube.go:599] Stopped ovnkube\\\\nI1211 21:48:59.340847 6188 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1211 21:48:59.340855 6188 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1211 21:48:59.340862 6188 handler.go:208] Removed *v1.Node event handler 2\\\\nI1211 21:48:59.340868 6188 handler.go:208] Removed *v1.Node event handler 7\\\\nI1211 21:48:59.340873 6188 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1211 21:48:59.340878 6188 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1211 2\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08775b7554e61eb831a833a467dd43277127a364155e700ba9e23d1ecd100a10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"odePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.34],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1211 21:49:01.534613 6371 ovnkube.go:599] Stopped ovnkube\\\\nI1211 21:49:01.533844 6371 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}\\\\nI1211 21:49:01.534826 6371 services_controller.go:360] Finished syncing service metrics on namespace openshift-controller-manager-operator for network=default : 9.095617ms\\\\nI1211 21:49:01.534850 6371 services_controller.go:356] Processing sync for service openshift-machine-config-operator/machine-config-daemon for network=default\\\\nI1211 21:49:01.534869 6371 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1211 21:49:01.534926 6371 lb_config.go:1031] Cluster endpoints for 
openshift-ingress-canary/ingress-canary for network=default are: map[]\\\\nF1211 21:49:01.534948 6371 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.1
26.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.561809 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.561853 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.561868 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.561893 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.561903 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:02Z","lastTransitionTime":"2025-12-11T21:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.566000 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.578019 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.596858 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c1dcc64c988fc4c0aa012688bd5870f4e8c0b6198e6587dad96d118c617900e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.611881 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.624583 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.641292 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.694018 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.694550 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.694568 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.694593 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.694613 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:02Z","lastTransitionTime":"2025-12-11T21:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.695147 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.705889 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.719182 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.730530 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.741385 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1e32015-9a51-44d0-be08-ecb4f246ddd1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dc89fdda6e9653addc07f72bc62500419569ba7a115ea3396f1a07519dae349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c52482e59cae
165d4851f84d13879c90ac4312289b9513badd9254a90039d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vzpq5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.754072 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.765739 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.777762 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.787046 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fgzkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534554e4-788d-4649-9dfc-ab5fd83d37d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fgzkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:02Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.797026 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.797057 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.797065 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.797079 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.797088 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:02Z","lastTransitionTime":"2025-12-11T21:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.831459 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:49:02 crc kubenswrapper[4956]: E1211 21:49:02.831611 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:49:18.831594754 +0000 UTC m=+51.275972904 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.900630 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.900677 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.900689 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.900706 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.900720 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:02Z","lastTransitionTime":"2025-12-11T21:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.932738 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.932897 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:49:02 crc kubenswrapper[4956]: E1211 21:49:02.932931 4956 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.932966 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:49:02 crc kubenswrapper[4956]: E1211 21:49:02.933073 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 21:49:18.93302575 +0000 UTC m=+51.377403960 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 21:49:02 crc kubenswrapper[4956]: I1211 21:49:02.933121 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:49:02 crc kubenswrapper[4956]: E1211 21:49:02.933194 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 21:49:02 crc kubenswrapper[4956]: E1211 21:49:02.933199 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 21:49:02 crc kubenswrapper[4956]: E1211 21:49:02.933230 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 21:49:02 crc kubenswrapper[4956]: E1211 21:49:02.933248 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 21:49:02 crc kubenswrapper[4956]: E1211 21:49:02.933257 4956 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:49:02 crc kubenswrapper[4956]: E1211 21:49:02.933270 4956 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:49:02 crc kubenswrapper[4956]: E1211 21:49:02.933341 4956 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 21:49:02 crc kubenswrapper[4956]: E1211 21:49:02.933344 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 21:49:18.933315917 +0000 UTC m=+51.377694137 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:49:02 crc kubenswrapper[4956]: E1211 21:49:02.933398 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 21:49:18.933385649 +0000 UTC m=+51.377763809 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:49:02 crc kubenswrapper[4956]: E1211 21:49:02.933412 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 21:49:18.933404929 +0000 UTC m=+51.377783099 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.004033 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.004287 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.004413 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.004487 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.004567 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:03Z","lastTransitionTime":"2025-12-11T21:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.020637 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.020720 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.021020 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:49:03 crc kubenswrapper[4956]: E1211 21:49:03.021189 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:49:03 crc kubenswrapper[4956]: E1211 21:49:03.021357 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:49:03 crc kubenswrapper[4956]: E1211 21:49:03.021551 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.106901 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.106944 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.106954 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.106970 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.106983 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:03Z","lastTransitionTime":"2025-12-11T21:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.231086 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.231137 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.231154 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.231189 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.231205 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:03Z","lastTransitionTime":"2025-12-11T21:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.333327 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.333382 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.333394 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.333410 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.333422 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:03Z","lastTransitionTime":"2025-12-11T21:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.436188 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.436270 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.436287 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.436305 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.436316 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:03Z","lastTransitionTime":"2025-12-11T21:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.535726 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-v52ql_c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30/ovnkube-controller/1.log" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.538451 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.538490 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.538506 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.538526 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.538543 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:03Z","lastTransitionTime":"2025-12-11T21:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.539264 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/534554e4-788d-4649-9dfc-ab5fd83d37d9-metrics-certs\") pod \"network-metrics-daemon-fgzkb\" (UID: \"534554e4-788d-4649-9dfc-ab5fd83d37d9\") " pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:03 crc kubenswrapper[4956]: E1211 21:49:03.539431 4956 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 21:49:03 crc kubenswrapper[4956]: E1211 21:49:03.539503 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/534554e4-788d-4649-9dfc-ab5fd83d37d9-metrics-certs podName:534554e4-788d-4649-9dfc-ab5fd83d37d9 nodeName:}" failed. No retries permitted until 2025-12-11 21:49:05.539481952 +0000 UTC m=+37.983860122 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/534554e4-788d-4649-9dfc-ab5fd83d37d9-metrics-certs") pod "network-metrics-daemon-fgzkb" (UID: "534554e4-788d-4649-9dfc-ab5fd83d37d9") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.640825 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.640874 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.640891 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.640915 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.640933 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:03Z","lastTransitionTime":"2025-12-11T21:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.743571 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.743638 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.743660 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.743690 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.743714 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:03Z","lastTransitionTime":"2025-12-11T21:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.846642 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.846725 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.846738 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.846756 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.846782 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:03Z","lastTransitionTime":"2025-12-11T21:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.949056 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.949104 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.949120 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.949141 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:03 crc kubenswrapper[4956]: I1211 21:49:03.949160 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:03Z","lastTransitionTime":"2025-12-11T21:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.021594 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:04 crc kubenswrapper[4956]: E1211 21:49:04.022160 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.050674 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.050712 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.050720 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.050734 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.050744 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:04Z","lastTransitionTime":"2025-12-11T21:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.153022 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.153107 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.153130 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.153158 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.153184 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:04Z","lastTransitionTime":"2025-12-11T21:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.256290 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.256351 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.256368 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.256391 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.256411 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:04Z","lastTransitionTime":"2025-12-11T21:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.359016 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.359096 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.359128 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.359156 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.359181 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:04Z","lastTransitionTime":"2025-12-11T21:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.461710 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.461873 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.461900 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.461930 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.461950 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:04Z","lastTransitionTime":"2025-12-11T21:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.564874 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.564944 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.564961 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.564985 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.565003 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:04Z","lastTransitionTime":"2025-12-11T21:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.667749 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.667841 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.667858 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.667882 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.667899 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:04Z","lastTransitionTime":"2025-12-11T21:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.770807 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.770853 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.770862 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.770879 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.770888 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:04Z","lastTransitionTime":"2025-12-11T21:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.874191 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.874237 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.874246 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.874260 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.874270 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:04Z","lastTransitionTime":"2025-12-11T21:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.976701 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.976838 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.976866 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.976900 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:04 crc kubenswrapper[4956]: I1211 21:49:04.976927 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:04Z","lastTransitionTime":"2025-12-11T21:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.020796 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.020880 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:49:05 crc kubenswrapper[4956]: E1211 21:49:05.020939 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:49:05 crc kubenswrapper[4956]: E1211 21:49:05.021014 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.020876 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:49:05 crc kubenswrapper[4956]: E1211 21:49:05.021264 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.080360 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.080430 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.080456 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.080486 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.080511 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:05Z","lastTransitionTime":"2025-12-11T21:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.183502 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.183570 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.183587 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.183609 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.183627 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:05Z","lastTransitionTime":"2025-12-11T21:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.286609 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.286668 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.286680 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.286699 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.286712 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:05Z","lastTransitionTime":"2025-12-11T21:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.388877 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.388918 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.388927 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.388941 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.388957 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:05Z","lastTransitionTime":"2025-12-11T21:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.491304 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.491355 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.491364 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.491378 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.491389 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:05Z","lastTransitionTime":"2025-12-11T21:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.562180 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/534554e4-788d-4649-9dfc-ab5fd83d37d9-metrics-certs\") pod \"network-metrics-daemon-fgzkb\" (UID: \"534554e4-788d-4649-9dfc-ab5fd83d37d9\") " pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:05 crc kubenswrapper[4956]: E1211 21:49:05.562364 4956 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 21:49:05 crc kubenswrapper[4956]: E1211 21:49:05.562488 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/534554e4-788d-4649-9dfc-ab5fd83d37d9-metrics-certs podName:534554e4-788d-4649-9dfc-ab5fd83d37d9 nodeName:}" failed. No retries permitted until 2025-12-11 21:49:09.562463549 +0000 UTC m=+42.006841729 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/534554e4-788d-4649-9dfc-ab5fd83d37d9-metrics-certs") pod "network-metrics-daemon-fgzkb" (UID: "534554e4-788d-4649-9dfc-ab5fd83d37d9") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.593875 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.593915 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.593923 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.593936 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.593944 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:05Z","lastTransitionTime":"2025-12-11T21:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.696742 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.696811 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.696824 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.696843 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.696855 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:05Z","lastTransitionTime":"2025-12-11T21:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.799132 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.799169 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.799178 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.799190 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.799200 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:05Z","lastTransitionTime":"2025-12-11T21:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.902101 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.902229 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.902248 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.902272 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:05 crc kubenswrapper[4956]: I1211 21:49:05.902292 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:05Z","lastTransitionTime":"2025-12-11T21:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.005569 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.005656 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.005691 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.005722 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.005743 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:06Z","lastTransitionTime":"2025-12-11T21:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.021169 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:06 crc kubenswrapper[4956]: E1211 21:49:06.021406 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.108517 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.108616 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.108636 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.108661 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.108677 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:06Z","lastTransitionTime":"2025-12-11T21:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.207189 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.210554 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.210580 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.210590 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.210605 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.210616 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:06Z","lastTransitionTime":"2025-12-11T21:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.225297 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:06Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.247346 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:06Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.273481 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:06Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.296009 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:06Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.313149 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.313232 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.313250 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.313270 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.313316 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:06Z","lastTransitionTime":"2025-12-11T21:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.317362 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:06Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.336716 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:06Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.352969 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:06Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.368949 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1e32015-9a51-44d0-be08-ecb4f246ddd1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dc89fdda6e9653addc07f72bc62500419569ba7a115ea3396f1a07519dae349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c52482e59cae165d4851f84d13879c90ac4312289b9513badd9254a90039d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7
73257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vzpq5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:06Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.383106 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:06Z is after 
2025-08-24T17:21:41Z" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.394573 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:06Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.407664 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:06Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.415228 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.415263 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.415274 4956 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.415290 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.415301 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:06Z","lastTransitionTime":"2025-12-11T21:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.421748 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fgzkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534554e4-788d-4649-9dfc-ab5fd83d37d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fgzkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:06Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.440799 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:06Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.456597 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c1dcc64c988fc4c0aa012688bd5870f4e8c0b6198e6587dad96d118c617900e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:06Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.485943 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08775b7554e61eb831a833a467dd43277127a364155e700ba9e23d1ecd100a10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7885bb4dbe14fcf1c4f26f809fe65e9427a17a93a7db9ba70722830bbcd097a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI1211 21:48:59.340664 6188 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1211 21:48:59.340678 6188 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1211 21:48:59.340691 6188 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1211 21:48:59.340696 6188 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1211 21:48:59.340715 6188 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1211 21:48:59.340747 6188 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1211 21:48:59.340792 6188 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1211 21:48:59.340807 6188 factory.go:656] Stopping watch factory\\\\nI1211 21:48:59.340820 6188 ovnkube.go:599] Stopped ovnkube\\\\nI1211 21:48:59.340847 6188 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1211 21:48:59.340855 6188 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1211 21:48:59.340862 6188 handler.go:208] Removed *v1.Node event handler 2\\\\nI1211 21:48:59.340868 6188 handler.go:208] Removed *v1.Node event handler 7\\\\nI1211 21:48:59.340873 6188 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1211 21:48:59.340878 6188 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1211 2\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08775b7554e61eb831a833a467dd43277127a364155e700ba9e23d1ecd100a10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"odePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.34],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1211 21:49:01.534613 6371 ovnkube.go:599] Stopped ovnkube\\\\nI1211 21:49:01.533844 6371 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}\\\\nI1211 21:49:01.534826 6371 services_controller.go:360] Finished syncing service metrics on namespace openshift-controller-manager-operator for network=default : 9.095617ms\\\\nI1211 21:49:01.534850 6371 services_controller.go:356] Processing sync for service openshift-machine-config-operator/machine-config-daemon for network=default\\\\nI1211 21:49:01.534869 6371 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1211 21:49:01.534926 6371 lb_config.go:1031] Cluster endpoints for openshift-ingress-canary/ingress-canary for network=default are: map[]\\\\nF1211 21:49:01.534948 6371 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:06Z is after 2025-08-24T17:21:41Z"
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.500239 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:06Z is after 2025-08-24T17:21:41Z"
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.519320 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.519376 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.519395 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.519430 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.519446 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:06Z","lastTransitionTime":"2025-12-11T21:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.622656 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.622697 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.622709 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.622727 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.622739 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:06Z","lastTransitionTime":"2025-12-11T21:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.725985 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.726035 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.726047 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.726065 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.726078 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:06Z","lastTransitionTime":"2025-12-11T21:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.829698 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.829751 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.829778 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.829796 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.829807 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:06Z","lastTransitionTime":"2025-12-11T21:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.932624 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.932692 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.932703 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.932720 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:06 crc kubenswrapper[4956]: I1211 21:49:06.932731 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:06Z","lastTransitionTime":"2025-12-11T21:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.021108 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.021110 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.021133 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 11 21:49:07 crc kubenswrapper[4956]: E1211 21:49:07.021446 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 11 21:49:07 crc kubenswrapper[4956]: E1211 21:49:07.021528 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 11 21:49:07 crc kubenswrapper[4956]: E1211 21:49:07.021290 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.034920 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.034992 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.035017 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.035046 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.035069 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:07Z","lastTransitionTime":"2025-12-11T21:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.137947 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.138018 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.138039 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.138070 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.138093 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:07Z","lastTransitionTime":"2025-12-11T21:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.240414 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.240459 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.240468 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.240483 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.240495 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:07Z","lastTransitionTime":"2025-12-11T21:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.344059 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.344104 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.344115 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.344129 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.344140 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:07Z","lastTransitionTime":"2025-12-11T21:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.447512 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.447563 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.447584 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.447600 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.447611 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:07Z","lastTransitionTime":"2025-12-11T21:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.550476 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.550545 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.550566 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.550591 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.550611 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:07Z","lastTransitionTime":"2025-12-11T21:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.653914 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.653979 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.653999 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.654026 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.654045 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:07Z","lastTransitionTime":"2025-12-11T21:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.756875 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.756935 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.756951 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.756977 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.756993 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:07Z","lastTransitionTime":"2025-12-11T21:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.860276 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.860309 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.860318 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.860333 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.860343 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:07Z","lastTransitionTime":"2025-12-11T21:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.963894 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.964567 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.964617 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.964645 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:07 crc kubenswrapper[4956]: I1211 21:49:07.964663 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:07Z","lastTransitionTime":"2025-12-11T21:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.020845 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb"
Dec 11 21:49:08 crc kubenswrapper[4956]: E1211 21:49:08.021080 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9"
Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.035369 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:08Z is after 2025-08-24T17:21:41Z"
Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.048084 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:08Z is after 2025-08-24T17:21:41Z"
Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.065515 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:08Z is after 2025-08-24T17:21:41Z"
Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.067377 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.067438 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.067455 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.067484 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.067503 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:08Z","lastTransitionTime":"2025-12-11T21:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.081023 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fgzkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534554e4-788d-4649-9dfc-ab5fd83d37d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fgzkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:08Z is after 2025-08-24T17:21:41Z"
Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.098129 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:08Z is after 2025-08-24T17:21:41Z"
Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.117309 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c1dcc64c988fc4c0aa012688bd5870f4e8c0b6198e6587dad96d118c617900e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:08Z is after 2025-08-24T17:21:41Z"
Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.139515 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08775b7554e61eb831a833a467dd43277127a364
155e700ba9e23d1ecd100a10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7885bb4dbe14fcf1c4f26f809fe65e9427a17a93a7db9ba70722830bbcd097a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI1211 21:48:59.340664 6188 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1211 21:48:59.340678 6188 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1211 21:48:59.340691 6188 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1211 21:48:59.340696 6188 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1211 21:48:59.340715 6188 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1211 21:48:59.340747 6188 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1211 21:48:59.340792 6188 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1211 21:48:59.340807 6188 factory.go:656] Stopping watch factory\\\\nI1211 21:48:59.340820 6188 ovnkube.go:599] Stopped ovnkube\\\\nI1211 21:48:59.340847 6188 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1211 21:48:59.340855 6188 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1211 21:48:59.340862 6188 handler.go:208] Removed *v1.Node event handler 2\\\\nI1211 21:48:59.340868 6188 handler.go:208] Removed *v1.Node event handler 7\\\\nI1211 21:48:59.340873 6188 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1211 21:48:59.340878 6188 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1211 2\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08775b7554e61eb831a833a467dd43277127a364155e700ba9e23d1ecd100a10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"odePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.34],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1211 21:49:01.534613 6371 ovnkube.go:599] Stopped ovnkube\\\\nI1211 21:49:01.533844 6371 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}\\\\nI1211 21:49:01.534826 6371 services_controller.go:360] Finished syncing service metrics on namespace openshift-controller-manager-operator for network=default : 9.095617ms\\\\nI1211 21:49:01.534850 6371 services_controller.go:356] Processing sync for service openshift-machine-config-operator/machine-config-daemon for network=default\\\\nI1211 21:49:01.534869 6371 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1211 21:49:01.534926 6371 lb_config.go:1031] Cluster endpoints for 
openshift-ingress-canary/ingress-canary for network=default are: map[]\\\\nF1211 21:49:01.534948 6371 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.1
26.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:08Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.154619 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:08Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.169059 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:08Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.172542 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.172611 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.172633 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.172666 4956 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeNotReady" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.172691 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:08Z","lastTransitionTime":"2025-12-11T21:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.189503 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"
imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:08Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.211400 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:08Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.233665 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:08Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.254090 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:08Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.271552 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:08Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.275638 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.275701 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.275720 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.275747 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.275821 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:08Z","lastTransitionTime":"2025-12-11T21:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.287486 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:08Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.301663 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1e32015-9a51-44d0-be08-ecb4f246ddd1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dc89fdda6e9653addc07f72bc62500419569ba7a115ea3396f1a07519dae349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c52482e59cae165d4851f84d13879c90ac4312289b9513badd9254a90039d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:
00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vzpq5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:08Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.379248 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.379310 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.379331 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.379354 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.379372 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:08Z","lastTransitionTime":"2025-12-11T21:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.482527 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.482582 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.482599 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.482623 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.482641 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:08Z","lastTransitionTime":"2025-12-11T21:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.586908 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.586986 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.587010 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.587036 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.587138 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:08Z","lastTransitionTime":"2025-12-11T21:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.690499 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.690567 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.690585 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.690613 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.690630 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:08Z","lastTransitionTime":"2025-12-11T21:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.793521 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.793575 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.793592 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.793617 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.793634 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:08Z","lastTransitionTime":"2025-12-11T21:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.896204 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.896255 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.896267 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.896285 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.896301 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:08Z","lastTransitionTime":"2025-12-11T21:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.998921 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.998986 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.999003 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.999027 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:08 crc kubenswrapper[4956]: I1211 21:49:08.999044 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:08Z","lastTransitionTime":"2025-12-11T21:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.020427 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.020464 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.020569 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:49:09 crc kubenswrapper[4956]: E1211 21:49:09.020721 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:49:09 crc kubenswrapper[4956]: E1211 21:49:09.020900 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:49:09 crc kubenswrapper[4956]: E1211 21:49:09.021017 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.102016 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.102097 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.102121 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.102153 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.102179 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:09Z","lastTransitionTime":"2025-12-11T21:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.205337 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.205413 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.205436 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.205461 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.205477 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:09Z","lastTransitionTime":"2025-12-11T21:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.313330 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.313399 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.313417 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.313445 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.313467 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:09Z","lastTransitionTime":"2025-12-11T21:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.416251 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.416302 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.416315 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.416333 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.416346 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:09Z","lastTransitionTime":"2025-12-11T21:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.518869 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.518933 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.518950 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.518975 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.519000 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:09Z","lastTransitionTime":"2025-12-11T21:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.605508 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/534554e4-788d-4649-9dfc-ab5fd83d37d9-metrics-certs\") pod \"network-metrics-daemon-fgzkb\" (UID: \"534554e4-788d-4649-9dfc-ab5fd83d37d9\") " pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:09 crc kubenswrapper[4956]: E1211 21:49:09.605736 4956 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 21:49:09 crc kubenswrapper[4956]: E1211 21:49:09.605895 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/534554e4-788d-4649-9dfc-ab5fd83d37d9-metrics-certs podName:534554e4-788d-4649-9dfc-ab5fd83d37d9 nodeName:}" failed. No retries permitted until 2025-12-11 21:49:17.605863895 +0000 UTC m=+50.050242085 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/534554e4-788d-4649-9dfc-ab5fd83d37d9-metrics-certs") pod "network-metrics-daemon-fgzkb" (UID: "534554e4-788d-4649-9dfc-ab5fd83d37d9") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.638061 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.638134 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.638159 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.638191 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.638215 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:09Z","lastTransitionTime":"2025-12-11T21:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.740485 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.740543 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.740558 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.740613 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.740628 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:09Z","lastTransitionTime":"2025-12-11T21:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.842896 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.842937 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.842949 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.842966 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.842978 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:09Z","lastTransitionTime":"2025-12-11T21:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.948835 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.948894 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.948905 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.948925 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:09 crc kubenswrapper[4956]: I1211 21:49:09.948940 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:09Z","lastTransitionTime":"2025-12-11T21:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.020992 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:10 crc kubenswrapper[4956]: E1211 21:49:10.021118 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.050739 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.050849 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.050876 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.050908 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.050931 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:10Z","lastTransitionTime":"2025-12-11T21:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.154426 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.154489 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.154506 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.154524 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.154537 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:10Z","lastTransitionTime":"2025-12-11T21:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.257667 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.257800 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.257828 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.257860 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.257882 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:10Z","lastTransitionTime":"2025-12-11T21:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.361268 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.361342 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.361360 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.361381 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.361396 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:10Z","lastTransitionTime":"2025-12-11T21:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.463588 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.463647 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.463662 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.463682 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.463695 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:10Z","lastTransitionTime":"2025-12-11T21:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.565699 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.565820 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.565847 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.565868 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.565882 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:10Z","lastTransitionTime":"2025-12-11T21:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.668037 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.668095 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.668107 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.668125 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.668140 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:10Z","lastTransitionTime":"2025-12-11T21:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.770175 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.770222 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.770237 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.770252 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.770262 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:10Z","lastTransitionTime":"2025-12-11T21:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.844057 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.844858 4956 scope.go:117] "RemoveContainer" containerID="08775b7554e61eb831a833a467dd43277127a364155e700ba9e23d1ecd100a10" Dec 11 21:49:10 crc kubenswrapper[4956]: E1211 21:49:10.845027 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-v52ql_openshift-ovn-kubernetes(c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30)\"" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.862019 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:10Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.872449 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.872502 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.872520 
4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.872540 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.872556 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:10Z","lastTransitionTime":"2025-12-11T21:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.876654 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:10Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 
21:49:10.892309 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:10Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.906731 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fgzkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534554e4-788d-4649-9dfc-ab5fd83d37d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fgzkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:10Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.925496 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:10Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.947359 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c1dcc64c988fc4c0aa012688bd5870f4e8c0b6198e6587dad96d118c617900e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:10Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.976116 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.976165 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:10 crc 
kubenswrapper[4956]: I1211 21:49:10.976180 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.976202 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.976221 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:10Z","lastTransitionTime":"2025-12-11T21:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.978880 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08775b7554e61eb831a833a467dd43277127a364
155e700ba9e23d1ecd100a10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08775b7554e61eb831a833a467dd43277127a364155e700ba9e23d1ecd100a10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"odePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.34],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1211 21:49:01.534613 6371 ovnkube.go:599] Stopped ovnkube\\\\nI1211 21:49:01.533844 6371 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}\\\\nI1211 21:49:01.534826 6371 services_controller.go:360] Finished syncing service metrics on namespace openshift-controller-manager-operator for network=default : 9.095617ms\\\\nI1211 21:49:01.534850 6371 services_controller.go:356] Processing sync for service openshift-machine-config-operator/machine-config-daemon for network=default\\\\nI1211 21:49:01.534869 6371 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1211 21:49:01.534926 6371 lb_config.go:1031] Cluster endpoints for openshift-ingress-canary/ingress-canary for network=default are: map[]\\\\nF1211 21:49:01.534948 6371 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-v52ql_openshift-ovn-kubernetes(c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:10Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:10 crc kubenswrapper[4956]: I1211 21:49:10.995985 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:10Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.011077 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:11Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.021066 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.021139 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:49:11 crc kubenswrapper[4956]: E1211 21:49:11.021190 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.021274 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:49:11 crc kubenswrapper[4956]: E1211 21:49:11.021376 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:49:11 crc kubenswrapper[4956]: E1211 21:49:11.021475 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.025025 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e
6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:11Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.037043 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:11Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.051317 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:11Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.078189 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.078236 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.078247 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.078264 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.078275 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:11Z","lastTransitionTime":"2025-12-11T21:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.092725 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:11Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.102516 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:11Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.111078 4956 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:11Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.119894 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1e32015-9a51-44d0-be08-ecb4f246ddd1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dc89fdda6e9653addc07f72bc62500419569ba7a115ea3396f1a07519dae349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c52482e59cae165d4851f84d13879c90ac4312289b9513badd9254a90039d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vzpq5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:11Z is after 2025-08-24T17:21:41Z" Dec 11 
21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.181372 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.181439 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.181463 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.181496 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.181520 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:11Z","lastTransitionTime":"2025-12-11T21:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} [the same five-entry sequence repeats verbatim at 21:49:11.283995, 21:49:11.386848, 21:49:11.489533, and 21:49:11.592523] Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.616554 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.616599 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.616609 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.616625 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.616638 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:11Z","lastTransitionTime":"2025-12-11T21:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:11 crc kubenswrapper[4956]: E1211 21:49:11.629274 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:11Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.633215 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.633247 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.633255 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.633269 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.633279 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:11Z","lastTransitionTime":"2025-12-11T21:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:11 crc kubenswrapper[4956]: E1211 21:49:11.651294 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:11Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.655151 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.655194 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.655204 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.655221 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.655233 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:11Z","lastTransitionTime":"2025-12-11T21:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:11 crc kubenswrapper[4956]: E1211 21:49:11.667867 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:11Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.672646 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.672699 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.672716 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.672738 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.672755 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:11Z","lastTransitionTime":"2025-12-11T21:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:11 crc kubenswrapper[4956]: E1211 21:49:11.689222 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:11Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.693873 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.693904 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.693912 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.693929 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.693939 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:11Z","lastTransitionTime":"2025-12-11T21:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:11 crc kubenswrapper[4956]: E1211 21:49:11.711196 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:11Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:11 crc kubenswrapper[4956]: E1211 21:49:11.711344 4956 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.713584 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.713632 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.713643 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.713659 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.713672 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:11Z","lastTransitionTime":"2025-12-11T21:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.816330 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.816943 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.817162 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.817431 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.817695 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:11Z","lastTransitionTime":"2025-12-11T21:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.920521 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.920566 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.920581 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.921064 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:11 crc kubenswrapper[4956]: I1211 21:49:11.921097 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:11Z","lastTransitionTime":"2025-12-11T21:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.020964 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:12 crc kubenswrapper[4956]: E1211 21:49:12.021215 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.023911 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.023975 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.023999 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.024026 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.024050 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:12Z","lastTransitionTime":"2025-12-11T21:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.127005 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.127098 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.127139 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.127171 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.127194 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:12Z","lastTransitionTime":"2025-12-11T21:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.229626 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.229674 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.229692 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.229710 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.229721 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:12Z","lastTransitionTime":"2025-12-11T21:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.333060 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.333102 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.333111 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.333124 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.333132 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:12Z","lastTransitionTime":"2025-12-11T21:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.435583 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.435649 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.435673 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.435703 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.435725 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:12Z","lastTransitionTime":"2025-12-11T21:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.538606 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.538664 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.538674 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.538693 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.538710 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:12Z","lastTransitionTime":"2025-12-11T21:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.641715 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.641846 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.641859 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.641876 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.641886 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:12Z","lastTransitionTime":"2025-12-11T21:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.744152 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.744200 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.744215 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.744235 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.744249 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:12Z","lastTransitionTime":"2025-12-11T21:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.845888 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.845927 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.845941 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.845960 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.845972 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:12Z","lastTransitionTime":"2025-12-11T21:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.948029 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.948068 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.948076 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.948090 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:12 crc kubenswrapper[4956]: I1211 21:49:12.948099 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:12Z","lastTransitionTime":"2025-12-11T21:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.020441 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.020480 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.020506 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:49:13 crc kubenswrapper[4956]: E1211 21:49:13.020592 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:49:13 crc kubenswrapper[4956]: E1211 21:49:13.020667 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:49:13 crc kubenswrapper[4956]: E1211 21:49:13.020760 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.051470 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.051510 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.051520 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.051535 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.051548 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:13Z","lastTransitionTime":"2025-12-11T21:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.153796 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.153841 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.153852 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.153866 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.153876 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:13Z","lastTransitionTime":"2025-12-11T21:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.256522 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.256591 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.256604 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.256629 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.256643 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:13Z","lastTransitionTime":"2025-12-11T21:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.359512 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.359590 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.359608 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.359634 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.359653 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:13Z","lastTransitionTime":"2025-12-11T21:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.462397 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.462470 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.462483 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.462504 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.462557 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:13Z","lastTransitionTime":"2025-12-11T21:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.565081 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.565118 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.565126 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.565140 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.565153 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:13Z","lastTransitionTime":"2025-12-11T21:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.667766 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.667833 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.667846 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.667865 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.667883 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:13Z","lastTransitionTime":"2025-12-11T21:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.770825 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.770900 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.770924 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.770954 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.770976 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:13Z","lastTransitionTime":"2025-12-11T21:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.874730 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.874811 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.874820 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.874834 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.874846 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:13Z","lastTransitionTime":"2025-12-11T21:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.977632 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.977712 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.977730 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.977749 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:13 crc kubenswrapper[4956]: I1211 21:49:13.977761 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:13Z","lastTransitionTime":"2025-12-11T21:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.020268 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:14 crc kubenswrapper[4956]: E1211 21:49:14.020479 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.080718 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.080798 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.080831 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.080852 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.080868 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:14Z","lastTransitionTime":"2025-12-11T21:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.183200 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.183254 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.183265 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.183284 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.183296 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:14Z","lastTransitionTime":"2025-12-11T21:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.285657 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.285711 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.285722 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.285744 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.285757 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:14Z","lastTransitionTime":"2025-12-11T21:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.388481 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.388516 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.388525 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.388539 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.388550 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:14Z","lastTransitionTime":"2025-12-11T21:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.491814 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.491880 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.491892 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.491908 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.491938 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:14Z","lastTransitionTime":"2025-12-11T21:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.594470 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.594545 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.594561 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.594586 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.594606 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:14Z","lastTransitionTime":"2025-12-11T21:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.698267 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.698334 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.698350 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.698377 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.698395 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:14Z","lastTransitionTime":"2025-12-11T21:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.800658 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.800750 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.800808 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.800839 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.800860 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:14Z","lastTransitionTime":"2025-12-11T21:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.903133 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.903176 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.903187 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.903199 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:14 crc kubenswrapper[4956]: I1211 21:49:14.903210 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:14Z","lastTransitionTime":"2025-12-11T21:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.006850 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.006893 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.006904 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.006918 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.006929 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:15Z","lastTransitionTime":"2025-12-11T21:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.020959 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.021027 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.021070 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 11 21:49:15 crc kubenswrapper[4956]: E1211 21:49:15.021185 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 11 21:49:15 crc kubenswrapper[4956]: E1211 21:49:15.021276 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 11 21:49:15 crc kubenswrapper[4956]: E1211 21:49:15.021421 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.110441 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.110511 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.110528 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.110553 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.110571 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:15Z","lastTransitionTime":"2025-12-11T21:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.213552 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.213627 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.213650 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.213681 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.213704 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:15Z","lastTransitionTime":"2025-12-11T21:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.315992 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.316030 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.316042 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.316058 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.316068 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:15Z","lastTransitionTime":"2025-12-11T21:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.417996 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.418025 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.418034 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.418046 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.418054 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:15Z","lastTransitionTime":"2025-12-11T21:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.520095 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.520157 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.520179 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.520210 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.520234 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:15Z","lastTransitionTime":"2025-12-11T21:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.624043 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.624114 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.624138 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.624169 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.624197 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:15Z","lastTransitionTime":"2025-12-11T21:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.727313 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.727352 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.727363 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.727378 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.727389 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:15Z","lastTransitionTime":"2025-12-11T21:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.830625 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.830672 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.830682 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.830699 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.830715 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:15Z","lastTransitionTime":"2025-12-11T21:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.933597 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.933685 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.933708 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.933739 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:15 crc kubenswrapper[4956]: I1211 21:49:15.933760 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:15Z","lastTransitionTime":"2025-12-11T21:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.021195 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb"
Dec 11 21:49:16 crc kubenswrapper[4956]: E1211 21:49:16.021433 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.036488 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.036543 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.036559 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.036582 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.036604 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:16Z","lastTransitionTime":"2025-12-11T21:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.139808 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.139872 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.139890 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.139916 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.139965 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:16Z","lastTransitionTime":"2025-12-11T21:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.243095 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.243193 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.243220 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.243254 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.243290 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:16Z","lastTransitionTime":"2025-12-11T21:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.346605 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.346693 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.346720 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.346752 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.346812 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:16Z","lastTransitionTime":"2025-12-11T21:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.449565 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.449827 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.449938 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.450030 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.450104 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:16Z","lastTransitionTime":"2025-12-11T21:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.553590 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.554214 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.554251 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.554275 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.554288 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:16Z","lastTransitionTime":"2025-12-11T21:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.657199 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.657264 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.657281 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.657348 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.657398 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:16Z","lastTransitionTime":"2025-12-11T21:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.760290 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.760361 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.760380 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.760404 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.760422 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:16Z","lastTransitionTime":"2025-12-11T21:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.862867 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.862911 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.862924 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.862943 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.862958 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:16Z","lastTransitionTime":"2025-12-11T21:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.966676 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.966743 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.966759 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.966806 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:16 crc kubenswrapper[4956]: I1211 21:49:16.966821 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:16Z","lastTransitionTime":"2025-12-11T21:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.020879 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.020963 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 11 21:49:17 crc kubenswrapper[4956]: E1211 21:49:17.021111 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.020988 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 11 21:49:17 crc kubenswrapper[4956]: E1211 21:49:17.021311 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 11 21:49:17 crc kubenswrapper[4956]: E1211 21:49:17.021519 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.069417 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.069538 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.069638 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.069712 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.069739 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:17Z","lastTransitionTime":"2025-12-11T21:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.174280 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.174367 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.174387 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.174412 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.174460 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:17Z","lastTransitionTime":"2025-12-11T21:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.259355 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.271329 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"]
Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.278713 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.278793 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.278809 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.278830 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.278847 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:17Z","lastTransitionTime":"2025-12-11T21:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.281455 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:17Z is after 2025-08-24T17:21:41Z"
Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.308700 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c1dcc64c988fc4c0aa012688bd5870f4e8c0b6198e6587dad96d118c617900e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:17Z is after 2025-08-24T17:21:41Z"
Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.332006 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08775b7554e61eb831a833a467dd43277127a364155e700ba9e23d1ecd100a10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08775b7554e61eb831a833a467dd43277127a364155e700ba9e23d1ecd100a10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"odePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.34],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1211 21:49:01.534613 6371 ovnkube.go:599] Stopped ovnkube\\\\nI1211 21:49:01.533844 6371 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}\\\\nI1211 21:49:01.534826 6371 services_controller.go:360] Finished syncing service metrics on namespace openshift-controller-manager-operator for network=default : 9.095617ms\\\\nI1211 21:49:01.534850 6371 services_controller.go:356] Processing sync for service openshift-machine-config-operator/machine-config-daemon for network=default\\\\nI1211 21:49:01.534869 6371 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1211 21:49:01.534926 6371 lb_config.go:1031] Cluster endpoints for openshift-ingress-canary/ingress-canary for network=default are: map[]\\\\nF1211 21:49:01.534948 6371 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-v52ql_openshift-ovn-kubernetes(c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:17Z is after 2025-08-24T17:21:41Z"
Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.350383 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:17Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.368999 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:17Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.381011 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.381048 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.381059 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.381075 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.381087 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:17Z","lastTransitionTime":"2025-12-11T21:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.381513 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:17Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.392897 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:17Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.403230 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:17Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.418058 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:17Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.429978 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:17Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.443016 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:17Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.456072 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1e32015-9a51-44d0-be08-ecb4f246ddd1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dc89fdda6e9653addc07f72bc62500419569ba7a115ea3396f1a07519dae349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c52482e59cae165d4851f84d13879c90ac4312289b9513badd9254a90039d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vzpq5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:17Z is after 2025-08-24T17:21:41Z" Dec 11 
21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.465833 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:17Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.475673 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:17Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.483274 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.483319 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.483331 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.483347 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.483357 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:17Z","lastTransitionTime":"2025-12-11T21:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.486561 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fgzkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534554e4-788d-4649-9dfc-ab5fd83d37d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fgzkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:17Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.498926 4956 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:17Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.586548 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.586916 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:17 crc 
kubenswrapper[4956]: I1211 21:49:17.586929 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.586944 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.586955 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:17Z","lastTransitionTime":"2025-12-11T21:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.689884 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.689950 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.689986 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.690014 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.690037 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:17Z","lastTransitionTime":"2025-12-11T21:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.693497 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/534554e4-788d-4649-9dfc-ab5fd83d37d9-metrics-certs\") pod \"network-metrics-daemon-fgzkb\" (UID: \"534554e4-788d-4649-9dfc-ab5fd83d37d9\") " pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:17 crc kubenswrapper[4956]: E1211 21:49:17.693721 4956 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 21:49:17 crc kubenswrapper[4956]: E1211 21:49:17.693847 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/534554e4-788d-4649-9dfc-ab5fd83d37d9-metrics-certs podName:534554e4-788d-4649-9dfc-ab5fd83d37d9 nodeName:}" failed. No retries permitted until 2025-12-11 21:49:33.693821188 +0000 UTC m=+66.138199368 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/534554e4-788d-4649-9dfc-ab5fd83d37d9-metrics-certs") pod "network-metrics-daemon-fgzkb" (UID: "534554e4-788d-4649-9dfc-ab5fd83d37d9") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.793233 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.793716 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.793915 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.794098 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.794245 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:17Z","lastTransitionTime":"2025-12-11T21:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.897429 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.897492 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.897510 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.897536 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:17 crc kubenswrapper[4956]: I1211 21:49:17.897553 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:17Z","lastTransitionTime":"2025-12-11T21:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.000602 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.000658 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.000675 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.000699 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.000716 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:18Z","lastTransitionTime":"2025-12-11T21:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.020717 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:18 crc kubenswrapper[4956]: E1211 21:49:18.021152 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.042239 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:18Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.059400 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fgzkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534554e4-788d-4649-9dfc-ab5fd83d37d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fgzkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:18Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.113655 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:18Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.117311 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.117368 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.117385 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.117415 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.117432 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:18Z","lastTransitionTime":"2025-12-11T21:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.139125 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c1dcc64c988fc4c0aa012688bd5870f4e8c0b6198e6587dad96d118c617900e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:18Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.168030 4956 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08775b7554e61eb831a833a467dd43277127a364155e700ba9e23d1ecd100a10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08775b7554e61eb831a833a467dd43277127a364155e700ba9e23d1ecd100a10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"odePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.34],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1211 21:49:01.534613 6371 ovnkube.go:599] Stopped ovnkube\\\\nI1211 21:49:01.533844 6371 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}\\\\nI1211 21:49:01.534826 6371 services_controller.go:360] Finished syncing service metrics on namespace openshift-controller-manager-operator for network=default : 9.095617ms\\\\nI1211 21:49:01.534850 6371 services_controller.go:356] Processing sync for service openshift-machine-config-operator/machine-config-daemon for network=default\\\\nI1211 21:49:01.534869 6371 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1211 21:49:01.534926 6371 lb_config.go:1031] Cluster endpoints for openshift-ingress-canary/ingress-canary for network=default are: map[]\\\\nF1211 21:49:01.534948 6371 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-v52ql_openshift-ovn-kubernetes(c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:18Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.189987 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:18Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.203553 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:18Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.220220 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.220259 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.220270 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.220288 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.220300 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:18Z","lastTransitionTime":"2025-12-11T21:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.221652 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name
\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:18Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.238867 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:18Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.258751 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:18Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.276085 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:18Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.293127 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:18Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.308666 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:18Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.323211 4956 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.323273 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.323291 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.323315 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.323332 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:18Z","lastTransitionTime":"2025-12-11T21:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.324351 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1e32015-9a51-44d0-be08-ecb4f246ddd1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dc89fdda6e9653addc07f72bc62500419569ba7a115ea3396f1a07519dae349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c52482e59cae165d4851f84d13879c90ac4312289b9513badd9254a90039d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vzpq5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:18Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.339424 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e971de-68ff-47ea-9e79-306b4fb67a8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c229454d9ab5fd483f45da51be9f8bfcf25e3b5989fd180f77141c1b706a9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa38c7d7e09aff0be77f317dc020e473fc83549e1d9d71e89ea3a95773ed6a17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380
066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a2d4a936cd20db53f471e17e8b237fc279073bb4a2db18e816bad71651a9c21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddf04a05f555a668de6a259bec6951c9727484c0273dc72de612db46d790f6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dddf04a05f555a668de6a259bec6951c9727484c0273dc72de612db46d790f6b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:18Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.350583 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:18Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.364238 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:18Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.426216 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.426745 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.427160 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.427472 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.427822 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:18Z","lastTransitionTime":"2025-12-11T21:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.531489 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.531550 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.531564 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.531586 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.531606 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:18Z","lastTransitionTime":"2025-12-11T21:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.635024 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.636048 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.636092 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.636124 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.636148 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:18Z","lastTransitionTime":"2025-12-11T21:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.739073 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.739155 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.739178 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.739211 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.739234 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:18Z","lastTransitionTime":"2025-12-11T21:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.842231 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.842318 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.842341 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.842373 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.842395 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:18Z","lastTransitionTime":"2025-12-11T21:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.924486 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:49:18 crc kubenswrapper[4956]: E1211 21:49:18.924732 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:49:50.924689381 +0000 UTC m=+83.369067531 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.945503 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.945569 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.945586 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.945613 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:18 crc kubenswrapper[4956]: I1211 21:49:18.945633 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:18Z","lastTransitionTime":"2025-12-11T21:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.020567 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.020631 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.020577 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:49:19 crc kubenswrapper[4956]: E1211 21:49:19.020803 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:49:19 crc kubenswrapper[4956]: E1211 21:49:19.020924 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:49:19 crc kubenswrapper[4956]: E1211 21:49:19.021013 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.026152 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.026217 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.026245 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.026266 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:49:19 crc kubenswrapper[4956]: E1211 21:49:19.026426 4956 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 21:49:19 crc kubenswrapper[4956]: E1211 21:49:19.026497 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 21:49:51.026476865 +0000 UTC m=+83.470855015 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 21:49:19 crc kubenswrapper[4956]: E1211 21:49:19.026537 4956 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 21:49:19 crc kubenswrapper[4956]: E1211 21:49:19.026563 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 21:49:51.026557667 +0000 UTC m=+83.470935817 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 21:49:19 crc kubenswrapper[4956]: E1211 21:49:19.026636 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 21:49:19 crc kubenswrapper[4956]: E1211 21:49:19.026649 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 21:49:19 crc kubenswrapper[4956]: E1211 21:49:19.026662 4956 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:49:19 crc kubenswrapper[4956]: E1211 21:49:19.026686 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 21:49:51.02667993 +0000 UTC m=+83.471058080 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:49:19 crc kubenswrapper[4956]: E1211 21:49:19.026735 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 21:49:19 crc kubenswrapper[4956]: E1211 21:49:19.026747 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 21:49:19 crc kubenswrapper[4956]: E1211 21:49:19.026755 4956 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:49:19 crc kubenswrapper[4956]: E1211 21:49:19.026805 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 21:49:51.026798653 +0000 UTC m=+83.471176803 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.048149 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.048235 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.048254 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.048285 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.048306 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:19Z","lastTransitionTime":"2025-12-11T21:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.150837 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.150876 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.150885 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.150899 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.150911 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:19Z","lastTransitionTime":"2025-12-11T21:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.254032 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.254096 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.254107 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.254126 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.254141 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:19Z","lastTransitionTime":"2025-12-11T21:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.356697 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.356739 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.356748 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.356784 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.356799 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:19Z","lastTransitionTime":"2025-12-11T21:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.459165 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.459198 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.459206 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.459218 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.459226 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:19Z","lastTransitionTime":"2025-12-11T21:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.561653 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.561718 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.561747 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.561762 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.561787 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:19Z","lastTransitionTime":"2025-12-11T21:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.664415 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.664469 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.664485 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.664505 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.664521 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:19Z","lastTransitionTime":"2025-12-11T21:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.767830 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.767895 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.767918 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.767948 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.767970 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:19Z","lastTransitionTime":"2025-12-11T21:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.871131 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.871192 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.871204 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.871219 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.871230 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:19Z","lastTransitionTime":"2025-12-11T21:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.973897 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.973993 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.974035 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.974070 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:19 crc kubenswrapper[4956]: I1211 21:49:19.974093 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:19Z","lastTransitionTime":"2025-12-11T21:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.021122 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:20 crc kubenswrapper[4956]: E1211 21:49:20.021362 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.077562 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.077672 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.077699 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.077730 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.077750 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:20Z","lastTransitionTime":"2025-12-11T21:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.181432 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.181514 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.181536 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.181568 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.181586 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:20Z","lastTransitionTime":"2025-12-11T21:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.285096 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.285233 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.285254 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.285280 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.285297 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:20Z","lastTransitionTime":"2025-12-11T21:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.388531 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.388639 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.388662 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.388695 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.388713 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:20Z","lastTransitionTime":"2025-12-11T21:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.491717 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.491864 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.491893 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.491923 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.491946 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:20Z","lastTransitionTime":"2025-12-11T21:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.594725 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.594804 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.594817 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.594834 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.594849 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:20Z","lastTransitionTime":"2025-12-11T21:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.697579 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.697626 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.697638 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.697655 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.697672 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:20Z","lastTransitionTime":"2025-12-11T21:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.800960 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.801040 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.801060 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.801082 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.801095 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:20Z","lastTransitionTime":"2025-12-11T21:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.903384 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.903442 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.903461 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.903485 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:20 crc kubenswrapper[4956]: I1211 21:49:20.903502 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:20Z","lastTransitionTime":"2025-12-11T21:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.005437 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.005495 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.005511 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.005533 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.005596 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:21Z","lastTransitionTime":"2025-12-11T21:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.020894 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.020919 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.020937 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:49:21 crc kubenswrapper[4956]: E1211 21:49:21.021004 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:49:21 crc kubenswrapper[4956]: E1211 21:49:21.021164 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:49:21 crc kubenswrapper[4956]: E1211 21:49:21.021212 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.109051 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.109300 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.109318 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.109343 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.109361 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:21Z","lastTransitionTime":"2025-12-11T21:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.212577 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.212641 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.212677 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.212707 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.212728 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:21Z","lastTransitionTime":"2025-12-11T21:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.315882 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.315928 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.315939 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.315956 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.315968 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:21Z","lastTransitionTime":"2025-12-11T21:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.417875 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.417909 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.417917 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.417929 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.417938 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:21Z","lastTransitionTime":"2025-12-11T21:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.521489 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.521569 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.521592 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.521619 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.521642 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:21Z","lastTransitionTime":"2025-12-11T21:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.624489 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.624560 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.624580 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.624603 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.624622 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:21Z","lastTransitionTime":"2025-12-11T21:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.727394 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.727428 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.727438 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.727477 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.727506 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:21Z","lastTransitionTime":"2025-12-11T21:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.829475 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.829536 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.829546 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.829561 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.829570 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:21Z","lastTransitionTime":"2025-12-11T21:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.932251 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.932332 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.932354 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.932385 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:21 crc kubenswrapper[4956]: I1211 21:49:21.932403 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:21Z","lastTransitionTime":"2025-12-11T21:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.020722 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:22 crc kubenswrapper[4956]: E1211 21:49:22.020938 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.033832 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.033868 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.033875 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.033887 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.033896 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:22Z","lastTransitionTime":"2025-12-11T21:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.085417 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.085457 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.085465 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.085479 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.085489 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:22Z","lastTransitionTime":"2025-12-11T21:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:22 crc kubenswrapper[4956]: E1211 21:49:22.099873 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:22Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.103737 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.103901 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.103927 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.103960 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.103986 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:22Z","lastTransitionTime":"2025-12-11T21:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:22 crc kubenswrapper[4956]: E1211 21:49:22.125635 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:22Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.130044 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.130091 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.130106 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.130127 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.130333 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:22Z","lastTransitionTime":"2025-12-11T21:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:22 crc kubenswrapper[4956]: E1211 21:49:22.150638 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:22Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.156711 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.156792 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.156806 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.156828 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.156842 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:22Z","lastTransitionTime":"2025-12-11T21:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:22 crc kubenswrapper[4956]: E1211 21:49:22.176600 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:22Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.181012 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.181060 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.181070 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.181089 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.181101 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:22Z","lastTransitionTime":"2025-12-11T21:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:22 crc kubenswrapper[4956]: E1211 21:49:22.194998 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:22Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:22 crc kubenswrapper[4956]: E1211 21:49:22.195141 4956 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.197395 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.197426 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.197434 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.197447 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.197456 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:22Z","lastTransitionTime":"2025-12-11T21:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.300314 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.300376 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.300392 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.300414 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.300431 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:22Z","lastTransitionTime":"2025-12-11T21:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.403358 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.403400 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.403411 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.403426 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.403438 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:22Z","lastTransitionTime":"2025-12-11T21:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.505901 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.505989 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.506014 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.506046 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.506070 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:22Z","lastTransitionTime":"2025-12-11T21:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.607743 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.607808 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.607819 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.607835 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.607847 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:22Z","lastTransitionTime":"2025-12-11T21:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.710808 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.710853 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.710867 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.710888 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.710903 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:22Z","lastTransitionTime":"2025-12-11T21:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.813546 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.813598 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.813609 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.813625 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.813636 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:22Z","lastTransitionTime":"2025-12-11T21:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.917294 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.917347 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.917361 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.917379 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:22 crc kubenswrapper[4956]: I1211 21:49:22.917392 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:22Z","lastTransitionTime":"2025-12-11T21:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.019841 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.019878 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.019887 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.019900 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.019909 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:23Z","lastTransitionTime":"2025-12-11T21:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.020119 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.020188 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:49:23 crc kubenswrapper[4956]: E1211 21:49:23.020239 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:49:23 crc kubenswrapper[4956]: E1211 21:49:23.020279 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.020289 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:49:23 crc kubenswrapper[4956]: E1211 21:49:23.020382 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.122449 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.122487 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.122534 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.122552 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.122564 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:23Z","lastTransitionTime":"2025-12-11T21:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.225323 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.225394 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.225414 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.225441 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.225474 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:23Z","lastTransitionTime":"2025-12-11T21:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.328395 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.328490 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.328514 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.328539 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.328555 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:23Z","lastTransitionTime":"2025-12-11T21:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.431169 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.431223 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.431239 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.431261 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.431278 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:23Z","lastTransitionTime":"2025-12-11T21:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.534314 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.534379 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.534397 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.534423 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.534440 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:23Z","lastTransitionTime":"2025-12-11T21:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.636624 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.636672 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.636685 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.636702 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.636715 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:23Z","lastTransitionTime":"2025-12-11T21:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.740150 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.740269 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.740288 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.740313 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.740332 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:23Z","lastTransitionTime":"2025-12-11T21:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.843830 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.843918 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.843943 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.844009 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.844034 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:23Z","lastTransitionTime":"2025-12-11T21:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.946885 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.946959 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.946976 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.947000 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:23 crc kubenswrapper[4956]: I1211 21:49:23.947035 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:23Z","lastTransitionTime":"2025-12-11T21:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.020494 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:24 crc kubenswrapper[4956]: E1211 21:49:24.020758 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.049581 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.049666 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.049693 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.049722 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.049744 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:24Z","lastTransitionTime":"2025-12-11T21:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.153025 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.153102 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.153141 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.153175 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.153198 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:24Z","lastTransitionTime":"2025-12-11T21:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.255229 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.255291 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.255304 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.255327 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.255342 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:24Z","lastTransitionTime":"2025-12-11T21:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.358339 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.358410 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.358433 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.358464 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.358482 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:24Z","lastTransitionTime":"2025-12-11T21:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.461235 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.461301 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.461315 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.461332 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.461344 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:24Z","lastTransitionTime":"2025-12-11T21:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.563528 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.563568 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.563576 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.563590 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.563599 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:24Z","lastTransitionTime":"2025-12-11T21:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.665297 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.665340 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.665351 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.665366 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.665376 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:24Z","lastTransitionTime":"2025-12-11T21:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.767612 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.767651 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.767659 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.767672 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.767681 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:24Z","lastTransitionTime":"2025-12-11T21:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.870856 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.870924 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.870960 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.870995 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.871016 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:24Z","lastTransitionTime":"2025-12-11T21:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.973356 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.973433 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.973457 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.973493 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:24 crc kubenswrapper[4956]: I1211 21:49:24.973518 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:24Z","lastTransitionTime":"2025-12-11T21:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.020390 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.020439 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:49:25 crc kubenswrapper[4956]: E1211 21:49:25.020522 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.020570 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:49:25 crc kubenswrapper[4956]: E1211 21:49:25.020831 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:49:25 crc kubenswrapper[4956]: E1211 21:49:25.021469 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.022128 4956 scope.go:117] "RemoveContainer" containerID="08775b7554e61eb831a833a467dd43277127a364155e700ba9e23d1ecd100a10" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.077349 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.077402 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.077415 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.077435 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.077451 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:25Z","lastTransitionTime":"2025-12-11T21:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.180228 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.180265 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.180276 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.180295 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.180307 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:25Z","lastTransitionTime":"2025-12-11T21:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.293183 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.293222 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.293233 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.293248 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.293259 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:25Z","lastTransitionTime":"2025-12-11T21:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.396487 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.396536 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.396559 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.396575 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.396586 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:25Z","lastTransitionTime":"2025-12-11T21:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.499307 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.499669 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.499681 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.499700 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.499716 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:25Z","lastTransitionTime":"2025-12-11T21:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.601577 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.601626 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.601642 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.601664 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.601681 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:25Z","lastTransitionTime":"2025-12-11T21:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.615136 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-v52ql_c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30/ovnkube-controller/1.log" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.617887 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" event={"ID":"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30","Type":"ContainerStarted","Data":"0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193"} Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.618433 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.631299 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:25Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.647850 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fgzkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534554e4-788d-4649-9dfc-ab5fd83d37d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fgzkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:25Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.669389 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:25Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.690787 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c1dcc64c988fc4c0aa012688bd5870f4e8c0b6198e6587dad96d118c617900e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:25Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.704726 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.704785 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:25 crc 
kubenswrapper[4956]: I1211 21:49:25.704794 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.704809 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.704818 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:25Z","lastTransitionTime":"2025-12-11T21:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.714110 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac
5a6cf0f1b6db39f732233193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08775b7554e61eb831a833a467dd43277127a364155e700ba9e23d1ecd100a10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"odePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.34],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1211 21:49:01.534613 6371 ovnkube.go:599] Stopped ovnkube\\\\nI1211 21:49:01.533844 6371 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}\\\\nI1211 21:49:01.534826 6371 services_controller.go:360] Finished syncing service metrics on namespace openshift-controller-manager-operator for network=default : 9.095617ms\\\\nI1211 21:49:01.534850 6371 services_controller.go:356] Processing sync for service openshift-machine-config-operator/machine-config-daemon for network=default\\\\nI1211 21:49:01.534869 6371 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1211 21:49:01.534926 6371 lb_config.go:1031] Cluster endpoints for openshift-ingress-canary/ingress-canary for network=default are: map[]\\\\nF1211 21:49:01.534948 6371 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:25Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.726575 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:25Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.736943 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:25Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.746369 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:25Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.758957 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:25Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.771748 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:25Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.783819 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:25Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.796387 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:25Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.807687 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.807736 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.807755 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.807828 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.807894 4956 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:25Z","lastTransitionTime":"2025-12-11T21:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.810065 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:25Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.820277 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1e32015-9a51-44d0-be08-ecb4f246ddd1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dc89fdda6e9653addc07f72bc62500419569ba7a115ea3396f1a07519dae349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c52482e59cae165d4851f84d13879c90ac4312289b9513badd9254a90039d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\
\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vzpq5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:25Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.836679 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e971de-68ff-47ea-9e79-306b4fb67a8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c229454d9ab5fd483f45da51be9f8bfcf25e3b5989fd180f77141c1b706a9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa38c7d7e09aff0be77f317dc020e473fc83549e1d9d71e89ea3a95773ed6a17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"
started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a2d4a936cd20db53f471e17e8b237fc279073bb4a2db18e816bad71651a9c21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddf04a05f555a668de6a259bec6951c9727484c0273dc72de612db46d790f6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dddf04a05f555a668de6a259bec6951c9727484c0273dc72de612db46d790f6b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:25Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.851159 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:25Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.863977 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:25Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.910324 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.910389 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.910401 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.910419 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:25 crc kubenswrapper[4956]: I1211 21:49:25.910431 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:25Z","lastTransitionTime":"2025-12-11T21:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.013638 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.013734 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.013766 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.013919 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.014025 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:26Z","lastTransitionTime":"2025-12-11T21:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.021262 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:26 crc kubenswrapper[4956]: E1211 21:49:26.021480 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.116920 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.116987 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.117005 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.117032 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.117051 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:26Z","lastTransitionTime":"2025-12-11T21:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.221497 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.221564 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.221580 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.221601 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.221621 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:26Z","lastTransitionTime":"2025-12-11T21:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.324251 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.324290 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.324301 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.324319 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.324330 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:26Z","lastTransitionTime":"2025-12-11T21:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.426829 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.426900 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.426922 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.426952 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.426973 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:26Z","lastTransitionTime":"2025-12-11T21:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.529836 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.530067 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.530099 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.530126 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.530146 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:26Z","lastTransitionTime":"2025-12-11T21:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.624237 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-v52ql_c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30/ovnkube-controller/2.log" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.625192 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-v52ql_c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30/ovnkube-controller/1.log" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.629115 4956 generic.go:334] "Generic (PLEG): container finished" podID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerID="0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193" exitCode=1 Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.629210 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" event={"ID":"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30","Type":"ContainerDied","Data":"0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193"} Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.629271 4956 scope.go:117] "RemoveContainer" containerID="08775b7554e61eb831a833a467dd43277127a364155e700ba9e23d1ecd100a10" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.630321 4956 scope.go:117] "RemoveContainer" containerID="0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193" Dec 11 21:49:26 crc kubenswrapper[4956]: E1211 21:49:26.630921 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-v52ql_openshift-ovn-kubernetes(c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30)\"" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.632650 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.632677 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.632687 4956 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.632701 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.632710 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:26Z","lastTransitionTime":"2025-12-11T21:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.651701 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true
,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:26Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.665067 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fgzkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534554e4-788d-4649-9dfc-ab5fd83d37d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fgzkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:26Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.677583 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:26Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.694837 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c1dcc64c988fc4c0aa012688bd5870f4e8c0b6198e6587dad96d118c617900e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:26Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.716352 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08775b7554e61eb831a833a467dd43277127a364155e700ba9e23d1ecd100a10\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"odePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.34],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1211 21:49:01.534613 6371 ovnkube.go:599] Stopped ovnkube\\\\nI1211 21:49:01.533844 6371 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}\\\\nI1211 21:49:01.534826 6371 services_controller.go:360] Finished syncing service metrics on namespace openshift-controller-manager-operator for network=default : 9.095617ms\\\\nI1211 21:49:01.534850 6371 services_controller.go:356] Processing sync for service openshift-machine-config-operator/machine-config-daemon for network=default\\\\nI1211 21:49:01.534869 6371 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1211 21:49:01.534926 6371 lb_config.go:1031] Cluster endpoints for openshift-ingress-canary/ingress-canary for network=default are: map[]\\\\nF1211 21:49:01.534948 6371 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:26Z\\\",\\\"message\\\":\\\"as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 
options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1211 21:49:25.898096 6634 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1211 21:49:25.900662 6634 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1211 21:49:25.900034 6634 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1211 21:49:25.900726 6634 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:49:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadO
nly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:26Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.732071 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:26Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.734962 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.735001 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.735013 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.735031 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.735043 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:26Z","lastTransitionTime":"2025-12-11T21:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.748460 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/cr
cont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:26Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.764280 4956 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc42
8172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:26Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.777988 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:26Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.792348 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:26Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.805759 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:26Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.818543 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:26Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.827595 4956 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:26Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.836810 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.836809 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1e32015-9a51-44d0-be08-ecb4f246ddd1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dc89fdda6e9653addc07f72bc62500419569ba7a115ea3396f1a07519dae349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c52482e59cae165d4851f84d13879c90ac4312289b9513badd9254a90039d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vzpq5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:26Z is after 2025-08-24T17:21:41Z" Dec 11 
21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.836866 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.836932 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.836945 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.836954 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:26Z","lastTransitionTime":"2025-12-11T21:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.850094 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e971de-68ff-47ea-9e79-306b4fb67a8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c229454d9ab5fd483f45da51be9f8bfcf25e3b5989fd180f77141c1b706a9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa38c7d7e09aff0be77f317dc020e473fc83549e1d9d71e89ea3a95773ed6a17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kub
ernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a2d4a936cd20db53f471e17e8b237fc279073bb4a2db18e816bad71651a9c21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddf04a05f555a668de6a259bec6951c9727484c0273dc72de612db46d790f6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dddf04a05f555a668de6a259bec6951c9727484c0273dc72de612db46d790f6b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:26Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.860813 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:26Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.871434 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:26Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.938907 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.938951 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.938961 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.938975 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:26 crc kubenswrapper[4956]: I1211 21:49:26.938986 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:26Z","lastTransitionTime":"2025-12-11T21:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.020956 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.021096 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.021325 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 11 21:49:27 crc kubenswrapper[4956]: E1211 21:49:27.021327 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 11 21:49:27 crc kubenswrapper[4956]: E1211 21:49:27.021420 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 11 21:49:27 crc kubenswrapper[4956]: E1211 21:49:27.021496 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.040646 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.040685 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.040694 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.040708 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.040717 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:27Z","lastTransitionTime":"2025-12-11T21:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.143830 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.143886 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.143897 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.143915 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.143928 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:27Z","lastTransitionTime":"2025-12-11T21:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.246559 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.246628 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.246653 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.246683 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.246709 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:27Z","lastTransitionTime":"2025-12-11T21:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.349889 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.349951 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.349970 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.349991 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.350006 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:27Z","lastTransitionTime":"2025-12-11T21:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.452382 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.452441 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.452457 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.452481 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.452498 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:27Z","lastTransitionTime":"2025-12-11T21:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.555649 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.556031 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.556124 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.556247 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.556347 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:27Z","lastTransitionTime":"2025-12-11T21:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.636431 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-v52ql_c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30/ovnkube-controller/2.log"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.645009 4956 scope.go:117] "RemoveContainer" containerID="0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193"
Dec 11 21:49:27 crc kubenswrapper[4956]: E1211 21:49:27.645913 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-v52ql_openshift-ovn-kubernetes(c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30)\"" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.658358 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.658416 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.658426 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.658438 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.658447 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:27Z","lastTransitionTime":"2025-12-11T21:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.660379 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e971de-68ff-47ea-9e79-306b4fb67a8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c229454d9ab5fd483f45da51be9f8bfcf25e3b5989fd180f77141c1b706a9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa38c7d7e09aff0be77f317dc020e473fc83549e1d9d71e89ea3a95773ed6a17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a2d4a936cd20db53f471e17e8b237fc279073bb4a2db18e816bad71651a9c21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddf04a05f555a668de6a259bec6951c9727484c0273dc72de612db46d790f6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dddf04a05f555a668de6a259bec6951c9727484c0273dc72de612db46d790f6b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:27Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.673654 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:27Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.686300 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:27Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.699835 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:27Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.712673 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fgzkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534554e4-788d-4649-9dfc-ab5fd83d37d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fgzkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:27Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.741195 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:26Z\\\",\\\"message\\\":\\\"as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1211 21:49:25.898096 6634 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1211 21:49:25.900662 6634 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1211 21:49:25.900034 6634 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1211 21:49:25.900726 6634 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:49:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-v52ql_openshift-ovn-kubernetes(c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:27Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.758179 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:27Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.761098 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.761160 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.761173 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.761191 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.761203 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:27Z","lastTransitionTime":"2025-12-11T21:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.774403 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:27Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.791815 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c1dcc64c988fc4c0aa012688bd5870f4e8c0b6198e6587dad96d118c617900e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:27Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.804549 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:27Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.816349 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:27Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.827051 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:27Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.837641 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:27Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.847076 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:27Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.859178 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:27Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.863519 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.863567 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.863583 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.863604 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.863619 4956 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:27Z","lastTransitionTime":"2025-12-11T21:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.872559 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:27Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.885124 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1e32015-9a51-44d0-be08-ecb4f246ddd1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dc89fdda6e9653addc07f72bc62500419569ba7a115ea3396f1a07519dae349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c52482e59cae165d4851f84d13879c90ac4312289b9513badd9254a90039d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vzpq5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:27Z is after 2025-08-24T17:21:41Z" Dec 11 
21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.965886 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.966490 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.966530 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.966552 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:27 crc kubenswrapper[4956]: I1211 21:49:27.966565 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:27Z","lastTransitionTime":"2025-12-11T21:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.023571 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:28 crc kubenswrapper[4956]: E1211 21:49:28.024111 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9" Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.041165 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:28Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.060515 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:28Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.069217 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.069250 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.069258 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.069272 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.069284 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:28Z","lastTransitionTime":"2025-12-11T21:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.074305 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:28Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.088787 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:28Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.100184 4956 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:28Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.116236 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:28Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.133946 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:28Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.146389 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1e32015-9a51-44d0-be08-ecb4f246ddd1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dc89fdda6e9653addc07f72bc62500419569ba7a115ea3396f1a07519dae349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c52482e59cae
165d4851f84d13879c90ac4312289b9513badd9254a90039d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vzpq5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:28Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.156083 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\"
,\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:28Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.166143 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e971de-68ff-47ea-9e79-306b4fb67a8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c229454d9ab5fd483f45da51be9f8bfcf25e3b5989fd180f77141c1b706a9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa38c7d7e09aff0be77f317dc020e473fc83549e1d9d71e89ea3a95773ed6a17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a2d4a936cd20db53f471e17e8b237fc279073bb4a2db18e816bad71651a9
c21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddf04a05f555a668de6a259bec6951c9727484c0273dc72de612db46d790f6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dddf04a05f555a668de6a259bec6951c9727484c0273dc72de612db46d790f6b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:28Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.171957 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.172004 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.172017 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.172038 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.172051 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:28Z","lastTransitionTime":"2025-12-11T21:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.178439 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:28Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.189752 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:28Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.198745 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fgzkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534554e4-788d-4649-9dfc-ab5fd83d37d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fgzkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:28Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.216351 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c1dcc64c988fc4c0aa012688bd5870f4e8c0b6198e6587dad96d118c617900e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:28Z is after 2025-08-24T17:21:41Z"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.246237 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:26Z\\\",\\\"message\\\":\\\"as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1211 21:49:25.898096 6634 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1211 21:49:25.900662 6634 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1211 21:49:25.900034 6634 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1211 21:49:25.900726 6634 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:49:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-v52ql_openshift-ovn-kubernetes(c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:28Z is after 2025-08-24T17:21:41Z"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.259804 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:28Z is after 2025-08-24T17:21:41Z"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.274613 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:28Z is after 2025-08-24T17:21:41Z"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.274668 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.274706 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.274718 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.274737 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.274749 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:28Z","lastTransitionTime":"2025-12-11T21:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.377878 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.377945 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.377970 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.378001 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.378023 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:28Z","lastTransitionTime":"2025-12-11T21:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.482164 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.482290 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.482315 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.482345 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.482369 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:28Z","lastTransitionTime":"2025-12-11T21:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.585994 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.586048 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.586066 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.586088 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.586105 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:28Z","lastTransitionTime":"2025-12-11T21:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.688880 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.688921 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.688930 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.688948 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.688958 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:28Z","lastTransitionTime":"2025-12-11T21:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.791650 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.791688 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.791696 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.791711 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.791720 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:28Z","lastTransitionTime":"2025-12-11T21:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.894304 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.894368 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.894386 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.894413 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.894433 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:28Z","lastTransitionTime":"2025-12-11T21:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.996525 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.997141 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.997155 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.997168 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:28 crc kubenswrapper[4956]: I1211 21:49:28.997190 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:28Z","lastTransitionTime":"2025-12-11T21:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.020193 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.020284 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.020200 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 11 21:49:29 crc kubenswrapper[4956]: E1211 21:49:29.020395 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 11 21:49:29 crc kubenswrapper[4956]: E1211 21:49:29.020667 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 11 21:49:29 crc kubenswrapper[4956]: E1211 21:49:29.020556 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.100392 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.100474 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.100492 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.100517 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.100534 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:29Z","lastTransitionTime":"2025-12-11T21:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.203338 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.203375 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.203384 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.203399 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.203409 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:29Z","lastTransitionTime":"2025-12-11T21:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.305583 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.305632 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.305644 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.305661 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.305680 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:29Z","lastTransitionTime":"2025-12-11T21:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.408047 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.408088 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.408097 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.408114 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.408124 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:29Z","lastTransitionTime":"2025-12-11T21:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.510999 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.511048 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.511061 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.511080 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.511093 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:29Z","lastTransitionTime":"2025-12-11T21:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.613998 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.614074 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.614099 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.614128 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.614155 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:29Z","lastTransitionTime":"2025-12-11T21:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.716713 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.716813 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.716831 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.716854 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.716866 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:29Z","lastTransitionTime":"2025-12-11T21:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.819077 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.819160 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.819176 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.819197 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.819211 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:29Z","lastTransitionTime":"2025-12-11T21:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.922259 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.922299 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.922309 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.922324 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:29 crc kubenswrapper[4956]: I1211 21:49:29.922337 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:29Z","lastTransitionTime":"2025-12-11T21:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.020705 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb"
Dec 11 21:49:30 crc kubenswrapper[4956]: E1211 21:49:30.020922 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.024345 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.024372 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.024380 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.024413 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.024422 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:30Z","lastTransitionTime":"2025-12-11T21:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.126987 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.127048 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.127064 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.127085 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.127099 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:30Z","lastTransitionTime":"2025-12-11T21:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.229511 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.229558 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.229580 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.229602 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.229616 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:30Z","lastTransitionTime":"2025-12-11T21:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.332423 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.332461 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.332473 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.332490 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.332501 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:30Z","lastTransitionTime":"2025-12-11T21:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.435465 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.435499 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.435507 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.435520 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.435528 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:30Z","lastTransitionTime":"2025-12-11T21:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.538107 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.538147 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.538158 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.538172 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.538182 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:30Z","lastTransitionTime":"2025-12-11T21:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.640686 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.640756 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.640815 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.640837 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.640851 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:30Z","lastTransitionTime":"2025-12-11T21:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.744391 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.744429 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.744442 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.744458 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.744469 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:30Z","lastTransitionTime":"2025-12-11T21:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.847110 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.847175 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.847195 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.847221 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.847239 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:30Z","lastTransitionTime":"2025-12-11T21:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.949819 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.949849 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.949856 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.949869 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:30 crc kubenswrapper[4956]: I1211 21:49:30.949877 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:30Z","lastTransitionTime":"2025-12-11T21:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.020190 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.020190 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 11 21:49:31 crc kubenswrapper[4956]: E1211 21:49:31.020375 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 11 21:49:31 crc kubenswrapper[4956]: E1211 21:49:31.020308 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.020206 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 11 21:49:31 crc kubenswrapper[4956]: E1211 21:49:31.020560 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.051653 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.051685 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.051693 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.051706 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.051715 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:31Z","lastTransitionTime":"2025-12-11T21:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.154143 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.154202 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.154216 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.154235 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.154248 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:31Z","lastTransitionTime":"2025-12-11T21:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.256277 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.256322 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.256334 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.256349 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.256360 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:31Z","lastTransitionTime":"2025-12-11T21:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.358352 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.358383 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.358394 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.358407 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.358417 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:31Z","lastTransitionTime":"2025-12-11T21:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.463753 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.463848 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.463867 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.463891 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.463915 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:31Z","lastTransitionTime":"2025-12-11T21:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.566930 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.566977 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.566990 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.567007 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.567019 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:31Z","lastTransitionTime":"2025-12-11T21:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.669050 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.669393 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.669406 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.669424 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.669436 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:31Z","lastTransitionTime":"2025-12-11T21:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.771073 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.771198 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.771208 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.771227 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.771244 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:31Z","lastTransitionTime":"2025-12-11T21:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.873472 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.873524 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.873533 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.873545 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.873556 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:31Z","lastTransitionTime":"2025-12-11T21:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.976561 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.976622 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.976641 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.976666 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:31 crc kubenswrapper[4956]: I1211 21:49:31.976682 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:31Z","lastTransitionTime":"2025-12-11T21:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.020903 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:32 crc kubenswrapper[4956]: E1211 21:49:32.021081 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.079483 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.079561 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.079587 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.079617 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.079637 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:32Z","lastTransitionTime":"2025-12-11T21:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.183256 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.183286 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.183297 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.183314 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.183326 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:32Z","lastTransitionTime":"2025-12-11T21:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.285844 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.285880 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.285891 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.285907 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.285918 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:32Z","lastTransitionTime":"2025-12-11T21:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.388739 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.388845 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.388872 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.388979 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.389001 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:32Z","lastTransitionTime":"2025-12-11T21:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.420906 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.420981 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.421009 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.421037 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.421059 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:32Z","lastTransitionTime":"2025-12-11T21:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:32 crc kubenswrapper[4956]: E1211 21:49:32.434551 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:32Z is after 2025-08-24T17:21:41Z"
Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.438409 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.438430 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.438438 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.438449 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.438456 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:32Z","lastTransitionTime":"2025-12-11T21:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:32 crc kubenswrapper[4956]: E1211 21:49:32.450849 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:32Z is after 2025-08-24T17:21:41Z"
Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.454486 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.454659 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.454820 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.454947 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.455086 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:32Z","lastTransitionTime":"2025-12-11T21:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:32 crc kubenswrapper[4956]: E1211 21:49:32.468074 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:32Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.472001 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.472059 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.472071 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.472085 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.472095 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:32Z","lastTransitionTime":"2025-12-11T21:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:32 crc kubenswrapper[4956]: E1211 21:49:32.484071 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:32Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.488070 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.488125 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.488140 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.488157 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.488169 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:32Z","lastTransitionTime":"2025-12-11T21:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:32 crc kubenswrapper[4956]: E1211 21:49:32.499977 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:32Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:32 crc kubenswrapper[4956]: E1211 21:49:32.500144 4956 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.501523 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.501551 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.501563 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.501581 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.501592 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:32Z","lastTransitionTime":"2025-12-11T21:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.603908 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.603937 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.603946 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.603960 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.603970 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:32Z","lastTransitionTime":"2025-12-11T21:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.706435 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.706523 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.706540 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.706619 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.706638 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:32Z","lastTransitionTime":"2025-12-11T21:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.809137 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.809187 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.809197 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.809219 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.809231 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:32Z","lastTransitionTime":"2025-12-11T21:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.911783 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.911825 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.911834 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.911849 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:32 crc kubenswrapper[4956]: I1211 21:49:32.911859 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:32Z","lastTransitionTime":"2025-12-11T21:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.014257 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.014328 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.014343 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.014359 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.014371 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:33Z","lastTransitionTime":"2025-12-11T21:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.020556 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.020589 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.020631 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:49:33 crc kubenswrapper[4956]: E1211 21:49:33.020699 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:49:33 crc kubenswrapper[4956]: E1211 21:49:33.020838 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:49:33 crc kubenswrapper[4956]: E1211 21:49:33.020922 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.117764 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.117829 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.117841 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.117859 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.117874 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:33Z","lastTransitionTime":"2025-12-11T21:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.220172 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.220222 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.220233 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.220248 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.220257 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:33Z","lastTransitionTime":"2025-12-11T21:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.323682 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.324101 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.324181 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.324285 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.324378 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:33Z","lastTransitionTime":"2025-12-11T21:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.426360 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.426409 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.426421 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.426438 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.426451 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:33Z","lastTransitionTime":"2025-12-11T21:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.529024 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.529086 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.529097 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.529112 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.529122 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:33Z","lastTransitionTime":"2025-12-11T21:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.631815 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.631862 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.631871 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.631887 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.631900 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:33Z","lastTransitionTime":"2025-12-11T21:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.700173 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/534554e4-788d-4649-9dfc-ab5fd83d37d9-metrics-certs\") pod \"network-metrics-daemon-fgzkb\" (UID: \"534554e4-788d-4649-9dfc-ab5fd83d37d9\") " pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:33 crc kubenswrapper[4956]: E1211 21:49:33.700485 4956 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 21:49:33 crc kubenswrapper[4956]: E1211 21:49:33.700649 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/534554e4-788d-4649-9dfc-ab5fd83d37d9-metrics-certs podName:534554e4-788d-4649-9dfc-ab5fd83d37d9 nodeName:}" failed. No retries permitted until 2025-12-11 21:50:05.700617682 +0000 UTC m=+98.144995862 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/534554e4-788d-4649-9dfc-ab5fd83d37d9-metrics-certs") pod "network-metrics-daemon-fgzkb" (UID: "534554e4-788d-4649-9dfc-ab5fd83d37d9") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.736549 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.736589 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.736597 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.736612 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.736626 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:33Z","lastTransitionTime":"2025-12-11T21:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.839282 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.839317 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.839326 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.839339 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.839350 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:33Z","lastTransitionTime":"2025-12-11T21:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.941849 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.941904 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.941921 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.941945 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:33 crc kubenswrapper[4956]: I1211 21:49:33.941963 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:33Z","lastTransitionTime":"2025-12-11T21:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.021034 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:34 crc kubenswrapper[4956]: E1211 21:49:34.021237 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9" Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.045171 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.045223 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.045241 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.045263 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.045280 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:34Z","lastTransitionTime":"2025-12-11T21:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.147961 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.148520 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.148617 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.148722 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.148832 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:34Z","lastTransitionTime":"2025-12-11T21:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.251108 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.251395 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.251476 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.251541 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.251598 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:34Z","lastTransitionTime":"2025-12-11T21:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.354532 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.354570 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.354581 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.354594 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.354604 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:34Z","lastTransitionTime":"2025-12-11T21:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.457294 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.457333 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.457342 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.457355 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.457366 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:34Z","lastTransitionTime":"2025-12-11T21:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.559500 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.559540 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.559552 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.559568 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.559579 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:34Z","lastTransitionTime":"2025-12-11T21:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.660977 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.661016 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.661027 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.661043 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.661053 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:34Z","lastTransitionTime":"2025-12-11T21:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.763296 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.763358 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.763377 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.763399 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.763417 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:34Z","lastTransitionTime":"2025-12-11T21:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.865678 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.865714 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.865722 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.865736 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.865744 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:34Z","lastTransitionTime":"2025-12-11T21:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.968309 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.968344 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.968353 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.968369 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:34 crc kubenswrapper[4956]: I1211 21:49:34.968380 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:34Z","lastTransitionTime":"2025-12-11T21:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.020533 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.020533 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 11 21:49:35 crc kubenswrapper[4956]: E1211 21:49:35.020672 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 11 21:49:35 crc kubenswrapper[4956]: E1211 21:49:35.020754 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.020551 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 11 21:49:35 crc kubenswrapper[4956]: E1211 21:49:35.020908 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.070422 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.070468 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.070480 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.070497 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.070511 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:35Z","lastTransitionTime":"2025-12-11T21:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.172760 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.172816 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.172827 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.172842 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.172853 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:35Z","lastTransitionTime":"2025-12-11T21:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.275013 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.275050 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.275058 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.275074 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.275083 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:35Z","lastTransitionTime":"2025-12-11T21:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.377121 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.377164 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.377175 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.377197 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.377210 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:35Z","lastTransitionTime":"2025-12-11T21:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.479373 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.479424 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.479436 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.479454 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.479466 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:35Z","lastTransitionTime":"2025-12-11T21:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.581687 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.581756 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.581797 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.581823 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.581845 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:35Z","lastTransitionTime":"2025-12-11T21:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.663590 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-p8slf_3f5c3105-d748-4563-b3f7-a566d31a3031/kube-multus/0.log"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.663635 4956 generic.go:334] "Generic (PLEG): container finished" podID="3f5c3105-d748-4563-b3f7-a566d31a3031" containerID="29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c" exitCode=1
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.663668 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-p8slf" event={"ID":"3f5c3105-d748-4563-b3f7-a566d31a3031","Type":"ContainerDied","Data":"29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c"}
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.664141 4956 scope.go:117] "RemoveContainer" containerID="29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.679135 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:35Z is after 2025-08-24T17:21:41Z"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.686639 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.686698 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.686720 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.686749 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.686803 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:35Z","lastTransitionTime":"2025-12-11T21:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.699998 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c1dcc64c988fc4c0aa012688bd5870f4e8c0b6198e6587dad96d118c617900e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:35Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.716934 4956 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:26Z\\\",\\\"message\\\":\\\"as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1211 21:49:25.898096 6634 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1211 21:49:25.900662 6634 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1211 21:49:25.900034 6634 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1211 21:49:25.900726 6634 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:49:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-v52ql_openshift-ovn-kubernetes(c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:35Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.729188 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:35Z\\\",\\\"message\\\":\\\"2025-12-11T21:48:50+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3f44c368-a0a7-4b1a-a9bc-33d696a6ab3d\\\\n2025-12-11T21:48:50+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3f44c368-a0a7-4b1a-a9bc-33d696a6ab3d to /host/opt/cni/bin/\\\\n2025-12-11T21:48:50Z [verbose] multus-daemon started\\\\n2025-12-11T21:48:50Z [verbose] Readiness Indicator file check\\\\n2025-12-11T21:49:35Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:35Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.741308 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:35Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.755350 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:35Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.768178 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:35Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.780462 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:35Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.788798 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.788828 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.788838 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.788852 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.788863 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:35Z","lastTransitionTime":"2025-12-11T21:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.793644 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:35Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.802905 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:35Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.813647 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:35Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.823632 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1e32015-9a51-44d0-be08-ecb4f246ddd1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dc89fdda6e9653addc07f72bc62500419569ba7a115ea3396f1a07519dae349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c52482e59cae165d4851f84d13879c90ac4312289b9513badd9254a90039d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7
73257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vzpq5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:35Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.835000 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e971de-68ff-47ea-9e79-306b4fb67a8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c229454d9ab5fd483f45da51be9f8bfcf25e3b5989fd180f77141c1b706a9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa38c7d7e09aff0be77f317dc020e473fc83549e1d9d71e89ea3a95773ed6a17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"
,\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a2d4a936cd20db53f471e17e8b237fc279073bb4a2db18e816bad71651a9c21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddf04a05f555a668de6a259bec6951c9727484c0273dc72de612db46d790f6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dddf04a05f555a668de6a259bec6951c9727484c0273dc72de612db46d790f6b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:35Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.844540 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:35Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.852719 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:35Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.864375 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:35Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.877321 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fgzkb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"534554e4-788d-4649-9dfc-ab5fd83d37d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fgzkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:35Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.891077 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.891116 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.891127 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.891144 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.891160 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:35Z","lastTransitionTime":"2025-12-11T21:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.993499 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.993560 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.993569 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.993584 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:35 crc kubenswrapper[4956]: I1211 21:49:35.993594 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:35Z","lastTransitionTime":"2025-12-11T21:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.020945 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:36 crc kubenswrapper[4956]: E1211 21:49:36.021076 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.095558 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.095606 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.095617 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.095634 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.095644 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:36Z","lastTransitionTime":"2025-12-11T21:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.198195 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.198273 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.198296 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.198330 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.198351 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:36Z","lastTransitionTime":"2025-12-11T21:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.301375 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.301435 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.301454 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.301480 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.301504 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:36Z","lastTransitionTime":"2025-12-11T21:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.404418 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.404458 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.404470 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.404487 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.404499 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:36Z","lastTransitionTime":"2025-12-11T21:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.506927 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.506966 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.506976 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.506989 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.506998 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:36Z","lastTransitionTime":"2025-12-11T21:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.609481 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.609515 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.609524 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.609537 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.609547 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:36Z","lastTransitionTime":"2025-12-11T21:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.667991 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-p8slf_3f5c3105-d748-4563-b3f7-a566d31a3031/kube-multus/0.log" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.668050 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-p8slf" event={"ID":"3f5c3105-d748-4563-b3f7-a566d31a3031","Type":"ContainerStarted","Data":"01bb9d76de97fef018802271d2a048c8bff06a4a38c4f92fbaa85fd1ec91f41e"} Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.682451 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e971de-68ff-47ea-9e79-306b4fb67a8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c229454d9ab5fd483f45da51be9f8bfcf25e3b5989fd180f77141c1b706a9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa38c7d7e09aff0be77f317dc020e473fc83549e1d9d71e89ea3a95773ed6a17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a2d4a936cd20db53f471e17e8b237fc279073bb4a2db18e816bad71651a9c21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddf04a05f555a668de6a259bec6951c9727484c0273dc72de612db46d790f6b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dddf04a05f555a668de6a259bec6951c9727484c0273dc72de612db46d790f6b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:36Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.693115 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:36Z is after 2025-08-24T17:21:41Z" Dec 11 
21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.702035 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:36Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.712265 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.712405 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.712492 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.712607 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.712695 4956 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:36Z","lastTransitionTime":"2025-12-11T21:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.715398 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:36Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.725859 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fgzkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534554e4-788d-4649-9dfc-ab5fd83d37d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fgzkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:36Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.744671 4956 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:26Z\\\",\\\"message\\\":\\\"as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1211 21:49:25.898096 6634 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1211 21:49:25.900662 6634 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1211 21:49:25.900034 6634 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1211 21:49:25.900726 6634 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:49:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-v52ql_openshift-ovn-kubernetes(c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:36Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.759696 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01bb9d76de97fef018802271d2a048c8bff06a4a38c4f92fbaa85fd1ec91f41e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:35Z\\\",\\\"message\\\":\\\"2025-12-11T21:48:50+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3f44c368-a0a7-4b1a-a9bc-33d696a6ab3d\\\\n2025-12-11T21:48:50+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3f44c368-a0a7-4b1a-a9bc-33d696a6ab3d to 
/host/opt/cni/bin/\\\\n2025-12-11T21:48:50Z [verbose] multus-daemon started\\\\n2025-12-11T21:48:50Z [verbose] Readiness Indicator file check\\\\n2025-12-11T21:49:35Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:36Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.774387 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:36Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.788407 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c1dcc64c988fc4c0aa012688bd5870f4e8c0b6198e6587dad96d118c617900e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:36Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.800215 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:36Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.810692 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:36Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.814484 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.814505 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.814514 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.814528 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.814539 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:36Z","lastTransitionTime":"2025-12-11T21:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.821684 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:36Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.831606 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:36Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.840563 4956 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:36Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.851388 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:36Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.862852 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:36Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.873102 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1e32015-9a51-44d0-be08-ecb4f246ddd1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dc89fdda6e9653addc07f72bc62500419569ba7a115ea3396f1a07519dae349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c52482e59cae
165d4851f84d13879c90ac4312289b9513badd9254a90039d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vzpq5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:36Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.917196 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.917371 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.917613 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.917879 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:36 crc kubenswrapper[4956]: I1211 21:49:36.918089 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:36Z","lastTransitionTime":"2025-12-11T21:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.020467 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.020520 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.020474 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:49:37 crc kubenswrapper[4956]: E1211 21:49:37.020578 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:49:37 crc kubenswrapper[4956]: E1211 21:49:37.020628 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:49:37 crc kubenswrapper[4956]: E1211 21:49:37.020809 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.021177 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.021213 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.021229 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.021248 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.021264 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:37Z","lastTransitionTime":"2025-12-11T21:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.123693 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.123746 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.123757 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.123797 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.123811 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:37Z","lastTransitionTime":"2025-12-11T21:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.226701 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.226750 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.226761 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.226797 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.226815 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:37Z","lastTransitionTime":"2025-12-11T21:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.329545 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.329616 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.329629 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.329644 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.329937 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:37Z","lastTransitionTime":"2025-12-11T21:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.432897 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.432950 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.432965 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.432985 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.433000 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:37Z","lastTransitionTime":"2025-12-11T21:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.535881 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.535915 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.535924 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.535939 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.535948 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:37Z","lastTransitionTime":"2025-12-11T21:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.639157 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.639198 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.639209 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.639258 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.639272 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:37Z","lastTransitionTime":"2025-12-11T21:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.741446 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.741490 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.741501 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.741518 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.741530 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:37Z","lastTransitionTime":"2025-12-11T21:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.844889 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.844937 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.844948 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.844960 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.844968 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:37Z","lastTransitionTime":"2025-12-11T21:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.947663 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.947746 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.947801 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.947835 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:37 crc kubenswrapper[4956]: I1211 21:49:37.947860 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:37Z","lastTransitionTime":"2025-12-11T21:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.021288 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:38 crc kubenswrapper[4956]: E1211 21:49:38.021457 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.040617 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c1dcc64c988fc4c0aa012688bd5870f4e8c0b6198e6587dad96d118c617900e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\"
,\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"i
mageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\"
:\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:38Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.050008 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.050062 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.050074 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.050095 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.050106 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:38Z","lastTransitionTime":"2025-12-11T21:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.061554 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac
5a6cf0f1b6db39f732233193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:26Z\\\",\\\"message\\\":\\\"as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1211 21:49:25.898096 6634 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1211 21:49:25.900662 6634 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1211 21:49:25.900034 6634 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1211 21:49:25.900726 6634 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:49:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-v52ql_openshift-ovn-kubernetes(c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:38Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.077531 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01bb9d76de97fef018802271d2a048c8bff06a4a38c4f92fbaa85fd1ec91f41e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:35Z\\\",\\\"message\\\":\\\"2025-12-11T21:48:50+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3f44c368-a0a7-4b1a-a9bc-33d696a6ab3d\\\\n2025-12-11T21:48:50+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3f44c368-a0a7-4b1a-a9bc-33d696a6ab3d to 
/host/opt/cni/bin/\\\\n2025-12-11T21:48:50Z [verbose] multus-daemon started\\\\n2025-12-11T21:48:50Z [verbose] Readiness Indicator file check\\\\n2025-12-11T21:49:35Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:38Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.092521 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:38Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.106161 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:38Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.121841 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:38Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.140036 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:38Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.152531 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:38Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.154231 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.154273 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.154286 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.154306 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.154323 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:38Z","lastTransitionTime":"2025-12-11T21:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.166723 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:38Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.181138 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:38Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.197189 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:38Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.211984 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1e32015-9a51-44d0-be08-ecb4f246ddd1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dc89fdda6e9653addc07f72bc62500419569ba7a115ea3396f1a07519dae349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c52482e59cae
165d4851f84d13879c90ac4312289b9513badd9254a90039d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vzpq5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:38Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.221533 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\"
,\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:38Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.232499 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e971de-68ff-47ea-9e79-306b4fb67a8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c229454d9ab5fd483f45da51be9f8bfcf25e3b5989fd180f77141c1b706a9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa38c7d7e09aff0be77f317dc020e473fc83549e1d9d71e89ea3a95773ed6a17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a2d4a936cd20db53f471e17e8b237fc279073bb4a2db18e816bad71651a9
c21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddf04a05f555a668de6a259bec6951c9727484c0273dc72de612db46d790f6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dddf04a05f555a668de6a259bec6951c9727484c0273dc72de612db46d790f6b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:38Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.243617 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:38Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.256219 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:38Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.257159 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.257203 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.257213 4956 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.257230 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.257240 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:38Z","lastTransitionTime":"2025-12-11T21:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.270885 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fgzkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534554e4-788d-4649-9dfc-ab5fd83d37d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fgzkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:38Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.359382 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.359434 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.359451 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.359476 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.359489 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:38Z","lastTransitionTime":"2025-12-11T21:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.463484 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.463594 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.463605 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.463624 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.463637 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:38Z","lastTransitionTime":"2025-12-11T21:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.566130 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.566405 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.566482 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.566562 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.566623 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:38Z","lastTransitionTime":"2025-12-11T21:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.669500 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.669784 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.669850 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.669915 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.670013 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:38Z","lastTransitionTime":"2025-12-11T21:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.772698 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.772735 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.772744 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.772758 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.772786 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:38Z","lastTransitionTime":"2025-12-11T21:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.875466 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.875509 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.875518 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.875533 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.875542 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:38Z","lastTransitionTime":"2025-12-11T21:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.978347 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.978390 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.978401 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.978416 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:38 crc kubenswrapper[4956]: I1211 21:49:38.978426 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:38Z","lastTransitionTime":"2025-12-11T21:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.020639 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:49:39 crc kubenswrapper[4956]: E1211 21:49:39.020793 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.020849 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.020915 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:49:39 crc kubenswrapper[4956]: E1211 21:49:39.021008 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:49:39 crc kubenswrapper[4956]: E1211 21:49:39.021235 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.081911 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.081982 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.081999 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.082024 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.082043 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:39Z","lastTransitionTime":"2025-12-11T21:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.184899 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.185739 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.186078 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.186279 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.186497 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:39Z","lastTransitionTime":"2025-12-11T21:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.289071 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.289862 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.289983 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.290111 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.290214 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:39Z","lastTransitionTime":"2025-12-11T21:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.393147 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.393181 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.393190 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.393204 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.393213 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:39Z","lastTransitionTime":"2025-12-11T21:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.497203 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.497612 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.497913 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.498166 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.498357 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:39Z","lastTransitionTime":"2025-12-11T21:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.601536 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.601574 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.601584 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.601597 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.601606 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:39Z","lastTransitionTime":"2025-12-11T21:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.703151 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.703187 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.703195 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.703210 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.703218 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:39Z","lastTransitionTime":"2025-12-11T21:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.805861 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.805908 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.805919 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.805935 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.805946 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:39Z","lastTransitionTime":"2025-12-11T21:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.909143 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.909192 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.909206 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.909235 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:39 crc kubenswrapper[4956]: I1211 21:49:39.909249 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:39Z","lastTransitionTime":"2025-12-11T21:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.011415 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.011499 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.011520 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.012134 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.012216 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:40Z","lastTransitionTime":"2025-12-11T21:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.021052 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:40 crc kubenswrapper[4956]: E1211 21:49:40.021223 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.118700 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.119256 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.119596 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.119955 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.120157 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:40Z","lastTransitionTime":"2025-12-11T21:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.223821 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.223865 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.223877 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.223893 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.223906 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:40Z","lastTransitionTime":"2025-12-11T21:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.326843 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.326885 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.326897 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.326912 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.326924 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:40Z","lastTransitionTime":"2025-12-11T21:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.431428 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.431539 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.431558 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.431586 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.431608 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:40Z","lastTransitionTime":"2025-12-11T21:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.534565 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.534622 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.534633 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.534649 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.534660 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:40Z","lastTransitionTime":"2025-12-11T21:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.636464 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.636514 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.636528 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.636551 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.636563 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:40Z","lastTransitionTime":"2025-12-11T21:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.739270 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.739610 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.739735 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.739864 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.739979 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:40Z","lastTransitionTime":"2025-12-11T21:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.842677 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.842717 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.842726 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.842740 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.842748 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:40Z","lastTransitionTime":"2025-12-11T21:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.945851 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.945893 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.945902 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.945917 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:40 crc kubenswrapper[4956]: I1211 21:49:40.945928 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:40Z","lastTransitionTime":"2025-12-11T21:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.020816 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:49:41 crc kubenswrapper[4956]: E1211 21:49:41.020933 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.020821 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.020976 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:49:41 crc kubenswrapper[4956]: E1211 21:49:41.021075 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:49:41 crc kubenswrapper[4956]: E1211 21:49:41.021225 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.048345 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.048413 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.048431 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.048456 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.048474 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:41Z","lastTransitionTime":"2025-12-11T21:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.151058 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.151414 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.151567 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.151712 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.151850 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:41Z","lastTransitionTime":"2025-12-11T21:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.254758 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.254872 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.254894 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.254919 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.254936 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:41Z","lastTransitionTime":"2025-12-11T21:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.357454 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.357498 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.357510 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.357526 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.357537 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:41Z","lastTransitionTime":"2025-12-11T21:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.460038 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.460106 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.460123 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.460147 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.460164 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:41Z","lastTransitionTime":"2025-12-11T21:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.563028 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.563085 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.563102 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.563131 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.563151 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:41Z","lastTransitionTime":"2025-12-11T21:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.665418 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.665495 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.665512 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.665537 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.665553 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:41Z","lastTransitionTime":"2025-12-11T21:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.768822 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.768911 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.768936 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.768963 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.768980 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:41Z","lastTransitionTime":"2025-12-11T21:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.871687 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.871726 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.871739 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.871755 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.871797 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:41Z","lastTransitionTime":"2025-12-11T21:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.975145 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.975192 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.975212 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.975238 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:41 crc kubenswrapper[4956]: I1211 21:49:41.975257 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:41Z","lastTransitionTime":"2025-12-11T21:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.022638 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:42 crc kubenswrapper[4956]: E1211 21:49:42.022790 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.077487 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.077524 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.077533 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.077548 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.077588 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:42Z","lastTransitionTime":"2025-12-11T21:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.179949 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.179982 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.179991 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.180006 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.180016 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:42Z","lastTransitionTime":"2025-12-11T21:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.282549 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.282903 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.283010 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.283107 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.283213 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:42Z","lastTransitionTime":"2025-12-11T21:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.387015 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.387067 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.387084 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.387107 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.387123 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:42Z","lastTransitionTime":"2025-12-11T21:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.490395 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.490456 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.490472 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.490496 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.490513 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:42Z","lastTransitionTime":"2025-12-11T21:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.590864 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.590935 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.590957 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.590979 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.590997 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:42Z","lastTransitionTime":"2025-12-11T21:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:42 crc kubenswrapper[4956]: E1211 21:49:42.619108 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:42Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.625489 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.625716 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.625741 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.625764 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.626331 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:42Z","lastTransitionTime":"2025-12-11T21:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:42 crc kubenswrapper[4956]: E1211 21:49:42.644979 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:42Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.651986 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.652052 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.652070 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.652456 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.652513 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:42Z","lastTransitionTime":"2025-12-11T21:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:42 crc kubenswrapper[4956]: E1211 21:49:42.670970 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:42Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.677069 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.677257 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.677410 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.677964 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.678083 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:42Z","lastTransitionTime":"2025-12-11T21:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:42 crc kubenswrapper[4956]: E1211 21:49:42.697184 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:42Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.701042 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.701096 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.701106 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.701122 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.701131 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:42Z","lastTransitionTime":"2025-12-11T21:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:42 crc kubenswrapper[4956]: E1211 21:49:42.718924 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:42Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:42 crc kubenswrapper[4956]: E1211 21:49:42.719260 4956 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.721461 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.721517 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.721579 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.721606 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.721671 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:42Z","lastTransitionTime":"2025-12-11T21:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.824847 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.824912 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.824948 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.824979 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.824998 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:42Z","lastTransitionTime":"2025-12-11T21:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.928635 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.929055 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.929224 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.929377 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:42 crc kubenswrapper[4956]: I1211 21:49:42.929516 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:42Z","lastTransitionTime":"2025-12-11T21:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.021068 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.021079 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.021229 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:49:43 crc kubenswrapper[4956]: E1211 21:49:43.021412 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:49:43 crc kubenswrapper[4956]: E1211 21:49:43.022163 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:49:43 crc kubenswrapper[4956]: E1211 21:49:43.022271 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.022686 4956 scope.go:117] "RemoveContainer" containerID="0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193" Dec 11 21:49:43 crc kubenswrapper[4956]: E1211 21:49:43.022984 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-v52ql_openshift-ovn-kubernetes(c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30)\"" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.032528 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.032678 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.032793 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.032880 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.032972 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:43Z","lastTransitionTime":"2025-12-11T21:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.136324 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.136826 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.137110 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.137530 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.137966 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:43Z","lastTransitionTime":"2025-12-11T21:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.242120 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.242219 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.242242 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.242275 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.242297 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:43Z","lastTransitionTime":"2025-12-11T21:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.346376 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.346440 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.346462 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.346488 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.346506 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:43Z","lastTransitionTime":"2025-12-11T21:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.449689 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.449748 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.449786 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.449813 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.449829 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:43Z","lastTransitionTime":"2025-12-11T21:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.553602 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.553652 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.553666 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.553685 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.553696 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:43Z","lastTransitionTime":"2025-12-11T21:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.656263 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.656317 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.656335 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.656358 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.656376 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:43Z","lastTransitionTime":"2025-12-11T21:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.760440 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.760509 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.760522 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.760541 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.760557 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:43Z","lastTransitionTime":"2025-12-11T21:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.864261 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.864507 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.864587 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.864718 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.864855 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:43Z","lastTransitionTime":"2025-12-11T21:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.967582 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.967662 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.967686 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.967714 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:43 crc kubenswrapper[4956]: I1211 21:49:43.967756 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:43Z","lastTransitionTime":"2025-12-11T21:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.021126 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:44 crc kubenswrapper[4956]: E1211 21:49:44.021293 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.070317 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.070387 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.070407 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.070433 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.070454 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:44Z","lastTransitionTime":"2025-12-11T21:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.177484 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.177529 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.177541 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.177558 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.177571 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:44Z","lastTransitionTime":"2025-12-11T21:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.281231 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.281270 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.281279 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.281294 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.281306 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:44Z","lastTransitionTime":"2025-12-11T21:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.384753 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.384854 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.384873 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.384899 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.384918 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:44Z","lastTransitionTime":"2025-12-11T21:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.488419 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.488460 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.488470 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.488486 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.488495 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:44Z","lastTransitionTime":"2025-12-11T21:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.591360 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.591405 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.591416 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.591433 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.591445 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:44Z","lastTransitionTime":"2025-12-11T21:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.695327 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.695373 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.695386 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.695403 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.695414 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:44Z","lastTransitionTime":"2025-12-11T21:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.798046 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.798113 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.798126 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.798143 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.798154 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:44Z","lastTransitionTime":"2025-12-11T21:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.900569 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.900693 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.900716 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.900746 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:44 crc kubenswrapper[4956]: I1211 21:49:44.900811 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:44Z","lastTransitionTime":"2025-12-11T21:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.003872 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.003918 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.003927 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.003940 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.003949 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:45Z","lastTransitionTime":"2025-12-11T21:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.020578 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.020603 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:49:45 crc kubenswrapper[4956]: E1211 21:49:45.020694 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.020716 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:49:45 crc kubenswrapper[4956]: E1211 21:49:45.020872 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:49:45 crc kubenswrapper[4956]: E1211 21:49:45.020902 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.031546 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.106639 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.106682 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.106699 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.106720 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.106736 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:45Z","lastTransitionTime":"2025-12-11T21:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.209841 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.209887 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.209899 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.209914 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.209924 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:45Z","lastTransitionTime":"2025-12-11T21:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.313903 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.313986 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.314007 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.314032 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.314051 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:45Z","lastTransitionTime":"2025-12-11T21:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.416491 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.416897 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.417022 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.417152 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.417253 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:45Z","lastTransitionTime":"2025-12-11T21:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.520499 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.520550 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.520559 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.520578 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.520595 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:45Z","lastTransitionTime":"2025-12-11T21:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.623062 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.623097 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.623108 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.623123 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.623132 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:45Z","lastTransitionTime":"2025-12-11T21:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.725830 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.725896 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.725921 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.725946 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.725964 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:45Z","lastTransitionTime":"2025-12-11T21:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.829153 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.829225 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.829242 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.829265 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.829284 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:45Z","lastTransitionTime":"2025-12-11T21:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.933060 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.933134 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.933160 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.933190 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:45 crc kubenswrapper[4956]: I1211 21:49:45.933213 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:45Z","lastTransitionTime":"2025-12-11T21:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.021200 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:46 crc kubenswrapper[4956]: E1211 21:49:46.021452 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.035760 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.035831 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.035848 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.035872 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.035889 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:46Z","lastTransitionTime":"2025-12-11T21:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.139006 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.139357 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.139541 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.139844 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.140044 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:46Z","lastTransitionTime":"2025-12-11T21:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.243072 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.243309 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.243426 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.243524 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.243614 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:46Z","lastTransitionTime":"2025-12-11T21:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.346407 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.346964 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.347154 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.347316 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.347480 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:46Z","lastTransitionTime":"2025-12-11T21:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.450340 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.450842 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.451073 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.451390 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.451608 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:46Z","lastTransitionTime":"2025-12-11T21:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.555187 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.555281 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.555314 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.555342 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.555363 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:46Z","lastTransitionTime":"2025-12-11T21:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.659091 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.659186 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.659212 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.659248 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.659286 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:46Z","lastTransitionTime":"2025-12-11T21:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.763235 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.763294 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.763326 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.763355 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.763376 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:46Z","lastTransitionTime":"2025-12-11T21:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.866103 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.866154 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.866163 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.866177 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.866186 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:46Z","lastTransitionTime":"2025-12-11T21:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.969147 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.969202 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.969223 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.969245 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:46 crc kubenswrapper[4956]: I1211 21:49:46.969260 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:46Z","lastTransitionTime":"2025-12-11T21:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.021124 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.021151 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:49:47 crc kubenswrapper[4956]: E1211 21:49:47.021306 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.021173 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:49:47 crc kubenswrapper[4956]: E1211 21:49:47.021384 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:49:47 crc kubenswrapper[4956]: E1211 21:49:47.021481 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.072231 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.072286 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.072300 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.072319 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.072331 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:47Z","lastTransitionTime":"2025-12-11T21:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.175231 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.175270 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.175282 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.175296 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.175308 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:47Z","lastTransitionTime":"2025-12-11T21:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.278184 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.278218 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.278229 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.278245 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.278255 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:47Z","lastTransitionTime":"2025-12-11T21:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.382188 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.382233 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.382245 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.382260 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.382271 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:47Z","lastTransitionTime":"2025-12-11T21:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.485114 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.485163 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.485175 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.485198 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.485212 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:47Z","lastTransitionTime":"2025-12-11T21:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.587466 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.587493 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.587502 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.587513 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.587522 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:47Z","lastTransitionTime":"2025-12-11T21:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.691405 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.691467 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.691483 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.691532 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.691553 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:47Z","lastTransitionTime":"2025-12-11T21:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.794399 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.794469 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.794494 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.794523 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.794547 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:47Z","lastTransitionTime":"2025-12-11T21:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.897130 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.897173 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.897183 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.897199 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:47 crc kubenswrapper[4956]: I1211 21:49:47.897210 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:47Z","lastTransitionTime":"2025-12-11T21:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:47.999961 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.000018 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.000039 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.000069 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.000090 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:48Z","lastTransitionTime":"2025-12-11T21:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.020293 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:48 crc kubenswrapper[4956]: E1211 21:49:48.020431 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
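The repeated setters.go:603 entries above show the kubelet flipping the node's Ready condition to False because no CNI configuration file exists yet in /etc/kubernetes/cni/net.d/. A minimal sketch of constructing that same condition with the upstream Go API types (assuming k8s.io/api and k8s.io/apimachinery are available; the message string is copied from the log, everything else is illustrative):

package main

import (
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

// notReadyCondition mirrors the condition JSON logged by setters.go:603:
// type=Ready, status=False, reason=KubeletNotReady, with the CNI message.
func notReadyCondition(now time.Time) corev1.NodeCondition {
	return corev1.NodeCondition{
		Type:               corev1.NodeReady,
		Status:             corev1.ConditionFalse,
		LastHeartbeatTime:  metav1.NewTime(now),
		LastTransitionTime: metav1.NewTime(now),
		Reason:             "KubeletNotReady",
		Message: "container runtime network not ready: NetworkReady=false " +
			"reason:NetworkPluginNotReady message:Network plugin returns error: " +
			"no CNI configuration file in /etc/kubernetes/cni/net.d/. " +
			"Has your network provider started?",
	}
}

func main() {
	fmt.Printf("%+v\n", notReadyCondition(time.Now()))
}

The kubelet re-evaluates and re-logs this condition on every node-status update attempt, which is why the same five event lines recur roughly every hundred milliseconds above.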
pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.043456 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.060241 4956 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-fgzkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534554e4-788d-4649-9dfc-ab5fd83d37d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fgzkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.076211 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"761cf146-79ec-47ec-9469-6776486c3178\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f544cad46d1313a6b2238591bbae146825c7360a076c95bce0ab7eb3bdafa848\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f0f7117e7d88af95b276a49af38ab2400b99d33b9954f9fc50f9fdd5699954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78f0f7117e7d88af95b276a49af38ab2400b99d33b9954f9fc50f9fdd5699954\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.094459 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.102539 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.102609 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.102634 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.102663 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.102688 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:48Z","lastTransitionTime":"2025-12-11T21:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.117497 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c1dcc64c988fc4c0aa012688bd5870f4e8c0b6198e6587dad96d118c617900e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.151506 4956 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:26Z\\\",\\\"message\\\":\\\"as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1211 21:49:25.898096 6634 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1211 21:49:25.900662 6634 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1211 21:49:25.900034 6634 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1211 21:49:25.900726 6634 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:49:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-v52ql_openshift-ovn-kubernetes(c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.175228 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01bb9d76de97fef018802271d2a048c8bff06a4a38c4f92fbaa85fd1ec91f41e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:35Z\\\",\\\"message\\\":\\\"2025-12-11T21:48:50+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3f44c368-a0a7-4b1a-a9bc-33d696a6ab3d\\\\n2025-12-11T21:48:50+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3f44c368-a0a7-4b1a-a9bc-33d696a6ab3d to 
/host/opt/cni/bin/\\\\n2025-12-11T21:48:50Z [verbose] multus-daemon started\\\\n2025-12-11T21:48:50Z [verbose] Readiness Indicator file check\\\\n2025-12-11T21:49:35Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.195629 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.205933 4956 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.205995 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.206013 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.206035 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.206053 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:48Z","lastTransitionTime":"2025-12-11T21:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.219663 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.239199 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.261108 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.279847 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:48Z is after 2025-08-24T17:21:41Z"
Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.297258 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:48Z is after 2025-08-24T17:21:41Z"
Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.309730 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.309799 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.309812 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.309829 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.309841 4956 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:48Z","lastTransitionTime":"2025-12-11T21:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.315159 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.329987 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1e32015-9a51-44d0-be08-ecb4f246ddd1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dc89fdda6e9653addc07f72bc62500419569ba7a115ea3396f1a07519dae349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c52482e59cae165d4851f84d13879c90ac4312289b9513badd9254a90039d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\
\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vzpq5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.346860 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e971de-68ff-47ea-9e79-306b4fb67a8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c229454d9ab5fd483f45da51be9f8bfcf25e3b5989fd180f77141c1b706a9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa38c7d7e09aff0be77f317dc020e473fc83549e1d9d71e89ea3a95773ed6a17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"
started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a2d4a936cd20db53f471e17e8b237fc279073bb4a2db18e816bad71651a9c21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddf04a05f555a668de6a259bec6951c9727484c0273dc72de612db46d790f6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dddf04a05f555a668de6a259bec6951c9727484c0273dc72de612db46d790f6b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.363098 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.377964 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:48Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.413546 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.413603 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.413620 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.413643 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.413659 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:48Z","lastTransitionTime":"2025-12-11T21:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.516981 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.517044 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.517063 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.517088 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.517105 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:48Z","lastTransitionTime":"2025-12-11T21:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.619441 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.619554 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.619569 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.619587 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.619599 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:48Z","lastTransitionTime":"2025-12-11T21:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.722743 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.723307 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.723424 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.723552 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.723661 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:48Z","lastTransitionTime":"2025-12-11T21:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.826677 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.826997 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.827079 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.827158 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.827217 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:48Z","lastTransitionTime":"2025-12-11T21:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.930112 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.930152 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.930162 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.930178 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:48 crc kubenswrapper[4956]: I1211 21:49:48.930189 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:48Z","lastTransitionTime":"2025-12-11T21:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:49 crc kubenswrapper[4956]: I1211 21:49:49.021117 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:49:49 crc kubenswrapper[4956]: I1211 21:49:49.021126 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:49:49 crc kubenswrapper[4956]: I1211 21:49:49.021170 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:49:49 crc kubenswrapper[4956]: E1211 21:49:49.021642 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:49:49 crc kubenswrapper[4956]: E1211 21:49:49.021884 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:49:49 crc kubenswrapper[4956]: E1211 21:49:49.021998 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:49:49 crc kubenswrapper[4956]: I1211 21:49:49.032334 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:49 crc kubenswrapper[4956]: I1211 21:49:49.032380 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:49 crc kubenswrapper[4956]: I1211 21:49:49.032424 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:49 crc kubenswrapper[4956]: I1211 21:49:49.032442 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:49 crc kubenswrapper[4956]: I1211 21:49:49.032453 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:49Z","lastTransitionTime":"2025-12-11T21:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 11 21:49:50 crc kubenswrapper[4956]: I1211 21:49:50.020892 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb"
Dec 11 21:49:50 crc kubenswrapper[4956]: E1211 21:49:50.021205 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9"
Dec 11 21:49:50 crc kubenswrapper[4956]: I1211 21:49:50.063273 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:50 crc kubenswrapper[4956]: I1211 21:49:50.063317 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:50 crc kubenswrapper[4956]: I1211 21:49:50.063330 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:50 crc kubenswrapper[4956]: I1211 21:49:50.063348 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:50 crc kubenswrapper[4956]: I1211 21:49:50.063360 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:50Z","lastTransitionTime":"2025-12-11T21:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:50 crc kubenswrapper[4956]: I1211 21:49:50.166949 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:50 crc kubenswrapper[4956]: I1211 21:49:50.167024 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:50 crc kubenswrapper[4956]: I1211 21:49:50.167048 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:50 crc kubenswrapper[4956]: I1211 21:49:50.167075 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:50 crc kubenswrapper[4956]: I1211 21:49:50.167265 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:50Z","lastTransitionTime":"2025-12-11T21:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:50 crc kubenswrapper[4956]: I1211 21:49:50.893813 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:50 crc kubenswrapper[4956]: I1211 21:49:50.893873 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:50 crc kubenswrapper[4956]: I1211 21:49:50.893890 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:50 crc kubenswrapper[4956]: I1211 21:49:50.893914 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:50 crc kubenswrapper[4956]: I1211 21:49:50.893931 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:50Z","lastTransitionTime":"2025-12-11T21:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:50 crc kubenswrapper[4956]: I1211 21:49:50.993613 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 21:49:50 crc kubenswrapper[4956]: E1211 21:49:50.993975 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:54.993950047 +0000 UTC m=+147.438328237 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:49:50 crc kubenswrapper[4956]: I1211 21:49:50.996717 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:50 crc kubenswrapper[4956]: I1211 21:49:50.996839 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:50 crc kubenswrapper[4956]: I1211 21:49:50.996862 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:50 crc kubenswrapper[4956]: I1211 21:49:50.996888 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:50 crc kubenswrapper[4956]: I1211 21:49:50.996906 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:50Z","lastTransitionTime":"2025-12-11T21:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:51 crc kubenswrapper[4956]: I1211 21:49:51.021253 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 11 21:49:51 crc kubenswrapper[4956]: I1211 21:49:51.021305 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 11 21:49:51 crc kubenswrapper[4956]: I1211 21:49:51.021270 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 11 21:49:51 crc kubenswrapper[4956]: E1211 21:49:51.021445 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 11 21:49:51 crc kubenswrapper[4956]: E1211 21:49:51.021585 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 11 21:49:51 crc kubenswrapper[4956]: E1211 21:49:51.021722 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 11 21:49:51 crc kubenswrapper[4956]: I1211 21:49:51.094961 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 11 21:49:51 crc kubenswrapper[4956]: I1211 21:49:51.095015 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 11 21:49:51 crc kubenswrapper[4956]: I1211 21:49:51.095041 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 11 21:49:51 crc kubenswrapper[4956]: I1211 21:49:51.095065 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 11 21:49:51 crc kubenswrapper[4956]: E1211 21:49:51.095098 4956 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 11 21:49:51 crc kubenswrapper[4956]: E1211 21:49:51.095194 4956 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 11 21:49:51 crc kubenswrapper[4956]: E1211 21:49:51.095214 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 21:50:55.095183139 +0000 UTC m=+147.539561329 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 11 21:49:51 crc kubenswrapper[4956]: E1211 21:49:51.095251 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 21:50:55.095234141 +0000 UTC m=+147.539612291 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 11 21:49:51 crc kubenswrapper[4956]: E1211 21:49:51.095285 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 11 21:49:51 crc kubenswrapper[4956]: E1211 21:49:51.095329 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 11 21:49:51 crc kubenswrapper[4956]: E1211 21:49:51.095361 4956 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 11 21:49:51 crc kubenswrapper[4956]: E1211 21:49:51.095442 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 21:50:55.095418346 +0000 UTC m=+147.539796546 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 11 21:49:51 crc kubenswrapper[4956]: E1211 21:49:51.096101 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 11 21:49:51 crc kubenswrapper[4956]: E1211 21:49:51.096143 4956 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 11 21:49:51 crc kubenswrapper[4956]: E1211 21:49:51.096167 4956 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 11 21:49:51 crc kubenswrapper[4956]: E1211 21:49:51.096252 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 21:50:55.096228536 +0000 UTC m=+147.540606746 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 11 21:49:51 crc kubenswrapper[4956]: I1211 21:49:51.099949 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:51 crc kubenswrapper[4956]: I1211 21:49:51.100013 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:51 crc kubenswrapper[4956]: I1211 21:49:51.100038 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:51 crc kubenswrapper[4956]: I1211 21:49:51.100067 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:51 crc kubenswrapper[4956]: I1211 21:49:51.100089 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:51Z","lastTransitionTime":"2025-12-11T21:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:51 crc kubenswrapper[4956]: I1211 21:49:51.203638 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:51 crc kubenswrapper[4956]: I1211 21:49:51.203695 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:51 crc kubenswrapper[4956]: I1211 21:49:51.203713 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:51 crc kubenswrapper[4956]: I1211 21:49:51.203736 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:51 crc kubenswrapper[4956]: I1211 21:49:51.203754 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:51Z","lastTransitionTime":"2025-12-11T21:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:51 crc kubenswrapper[4956]: I1211 21:49:51.924255 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:51 crc kubenswrapper[4956]: I1211 21:49:51.924756 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:51 crc kubenswrapper[4956]: I1211 21:49:51.924809 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:51 crc kubenswrapper[4956]: I1211 21:49:51.924836 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:51 crc kubenswrapper[4956]: I1211 21:49:51.924854 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:51Z","lastTransitionTime":"2025-12-11T21:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.020736 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb"
Dec 11 21:49:52 crc kubenswrapper[4956]: E1211 21:49:52.021349 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9"
Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.030632 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.030752 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.030875 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.030966 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.031001 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:52Z","lastTransitionTime":"2025-12-11T21:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.752467 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.752535 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.752557 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.752585 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.752607 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:52Z","lastTransitionTime":"2025-12-11T21:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.804336 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.804411 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.804436 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.804462 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.804485 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:52Z","lastTransitionTime":"2025-12-11T21:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:52 crc kubenswrapper[4956]: E1211 21:49:52.817648 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.821983 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.822060 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.822080 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.822105 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.822131 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:52Z","lastTransitionTime":"2025-12-11T21:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:52 crc kubenswrapper[4956]: E1211 21:49:52.839400 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.844255 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.844405 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.844427 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.844446 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.844491 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:52Z","lastTransitionTime":"2025-12-11T21:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:52 crc kubenswrapper[4956]: E1211 21:49:52.858855 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.863358 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.863420 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.863439 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.863463 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.863481 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:52Z","lastTransitionTime":"2025-12-11T21:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:52 crc kubenswrapper[4956]: E1211 21:49:52.879383 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.883578 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.883615 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.883623 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.883637 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.883647 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:52Z","lastTransitionTime":"2025-12-11T21:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:52 crc kubenswrapper[4956]: E1211 21:49:52.895811 4956 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b13d9006-a946-432b-9df3-08f296d9a158\\\",\\\"systemUUID\\\":\\\"38ee1c6a-2793-48ed-96fb-b9b725b90f32\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:52Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:52 crc kubenswrapper[4956]: E1211 21:49:52.896029 4956 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.897673 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.897712 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.897721 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.897736 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:52 crc kubenswrapper[4956]: I1211 21:49:52.897748 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:52Z","lastTransitionTime":"2025-12-11T21:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:54 crc kubenswrapper[4956]: I1211 21:49:54.242281 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:49:54 crc kubenswrapper[4956]: E1211 21:49:54.242407 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:49:54 crc kubenswrapper[4956]: I1211 21:49:54.242606 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:49:54 crc kubenswrapper[4956]: E1211 21:49:54.242665 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:49:54 crc kubenswrapper[4956]: I1211 21:49:54.242954 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:54 crc kubenswrapper[4956]: E1211 21:49:54.243077 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9" Dec 11 21:49:54 crc kubenswrapper[4956]: I1211 21:49:54.243291 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:49:54 crc kubenswrapper[4956]: E1211 21:49:54.243403 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:49:54 crc kubenswrapper[4956]: I1211 21:49:54.246141 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:54 crc kubenswrapper[4956]: I1211 21:49:54.246205 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:54 crc kubenswrapper[4956]: I1211 21:49:54.246228 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:54 crc kubenswrapper[4956]: I1211 21:49:54.246255 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:54 crc kubenswrapper[4956]: I1211 21:49:54.246279 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:54Z","lastTransitionTime":"2025-12-11T21:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:54 crc kubenswrapper[4956]: I1211 21:49:54.350641 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:54 crc kubenswrapper[4956]: I1211 21:49:54.350702 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:54 crc kubenswrapper[4956]: I1211 21:49:54.350722 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:54 crc kubenswrapper[4956]: I1211 21:49:54.350749 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:54 crc kubenswrapper[4956]: I1211 21:49:54.350800 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:54Z","lastTransitionTime":"2025-12-11T21:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 11 21:49:54 crc kubenswrapper[4956]: I1211 21:49:54.452857 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:54 crc kubenswrapper[4956]: I1211 21:49:54.452912 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:54 crc kubenswrapper[4956]: I1211 21:49:54.452924 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:54 crc kubenswrapper[4956]: I1211 21:49:54.452939 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:54 crc kubenswrapper[4956]: I1211 21:49:54.452947 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:54Z","lastTransitionTime":"2025-12-11T21:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
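Each setters.go:603 entry embeds the node's Ready condition as a JSON object after condition=. For anyone post-processing this log, a small sketch of pulling that payload out of one of the lines above; splitting on "condition=" is a heuristic fitted to these lines, not anything kubelet guarantees:

    import json

    line = ('I1211 21:49:54.452947 4956 setters.go:603] "Node became not ready" node="crc" '
            'condition={"type":"Ready","status":"False",'
            '"lastHeartbeatTime":"2025-12-11T21:49:54Z",'
            '"lastTransitionTime":"2025-12-11T21:49:54Z",'
            '"reason":"KubeletNotReady",'
            '"message":"container runtime network not ready: NetworkReady=false '
            'reason:NetworkPluginNotReady message:Network plugin returns error: '
            'no CNI configuration file in /etc/kubernetes/cni/net.d/. '
            'Has your network provider started?"}')

    # Everything after 'condition=' is valid JSON; parse and inspect it.
    cond = json.loads(line.split("condition=", 1)[1])
    print(cond["type"], cond["status"], cond["reason"])  # -> Ready False KubeletNotReady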
Dec 11 21:49:56 crc kubenswrapper[4956]: I1211 21:49:56.021297 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 11 21:49:56 crc kubenswrapper[4956]: I1211 21:49:56.021392 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 11 21:49:56 crc kubenswrapper[4956]: I1211 21:49:56.021463 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 11 21:49:56 crc kubenswrapper[4956]: I1211 21:49:56.021311 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb"
Dec 11 21:49:56 crc kubenswrapper[4956]: E1211 21:49:56.021521 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 11 21:49:56 crc kubenswrapper[4956]: E1211 21:49:56.021638 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 11 21:49:56 crc kubenswrapper[4956]: E1211 21:49:56.021822 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9"
Dec 11 21:49:56 crc kubenswrapper[4956]: E1211 21:49:56.022020 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.021509 4956 scope.go:117] "RemoveContainer" containerID="0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193"
Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.233276 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.233312 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.233322 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.233336 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.233345 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:57Z","lastTransitionTime":"2025-12-11T21:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
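All kubenswrapper entries in this file use the klog header layout: severity letter, MMDD date, wall-clock time, PID, source file:line, then the message. A throwaway parser for that layout, useful when slicing logs like this one; the regex is fitted to these lines rather than taken from any spec:

    import re

    # Matches e.g.: I1211 21:49:57.021509 4956 scope.go:117] "RemoveContainer" ...
    KLOG = re.compile(
        r'(?P<sev>[IWEF])(?P<mmdd>\d{4}) (?P<time>\d{2}:\d{2}:\d{2}\.\d+)\s+'
        r'(?P<pid>\d+) (?P<src>[\w.]+:\d+)\] (?P<msg>.*)')

    line = ('I1211 21:49:57.021509 4956 scope.go:117] "RemoveContainer" '
            'containerID="0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193"')

    m = KLOG.match(line)
    if m:
        print(m.group("sev"), m.group("time"), m.group("src"), "->", m.group("msg"))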
Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.264678 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-v52ql_c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30/ovnkube-controller/2.log"
Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.266228 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" event={"ID":"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30","Type":"ContainerStarted","Data":"1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268"}
Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.267236 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql"
Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.286600 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-p8slf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f5c3105-d748-4563-b3f7-a566d31a3031\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01bb9d76de97fef018802271d2a048c8bff06a4a38c4f92fbaa85fd1ec91f41e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:35Z\\\",\\\"message\\\":\\\"2025-12-11T21:48:50+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3f44c368-a0a7-4b1a-a9bc-33d696a6ab3d\\\\n2025-12-11T21:48:50+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3f44c368-a0a7-4b1a-a9bc-33d696a6ab3d to /host/opt/cni/bin/\\\\n2025-12-11T21:48:50Z [verbose] multus-daemon started\\\\n2025-12-11T21:48:50Z [verbose] Readiness Indicator file check\\\\n2025-12-11T21:49:35Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bpf2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-p8slf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.298890 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"761cf146-79ec-47ec-9469-6776486c3178\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f544cad46d1313a6b2238591bbae146825c7360a076c95bce0ab7eb3bdafa848\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f0f7117e7d88af95b276a49af38ab2400b99d33b9954f9fc50f9fdd5699954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78f0f7117e7d88af95b276a49af38ab2400b99d33b9954f9fc50f9fdd5699954\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.311668 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.327800 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c1dcc64c988fc4c0aa012688bd5870f4e8c0b6198e6587dad96d118c617900e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.337195 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.337244 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:57 crc 
kubenswrapper[4956]: I1211 21:49:57.337255 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.337270 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.337281 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:57Z","lastTransitionTime":"2025-12-11T21:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.344518 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1661892711e1bf3a3309935f295c30346d98a2be
6471b86a8b94fe3ecc09a268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-11T21:49:26Z\\\",\\\"message\\\":\\\"as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1211 21:49:25.898096 6634 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1211 21:49:25.900662 6634 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1211 21:49:25.900034 6634 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1211 21:49:25.900726 6634 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:49:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n6k6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-v52ql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.357108 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.368155 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.379236 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cf61c63b-b06c-4f51-add2-aefe57de751a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1203bd319dc4fff5d3d1be797909bf4d4dc75ce8c851f0fc5da7862d7cfff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bwnrx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-h6mx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.388854 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cz2dx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"afc0240e-109a-48e1-b0fe-4ca9386fce91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e41443f754f583057acf02742e93d971e033b311ad08bc3b749249436969b1ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bn6dr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cz2dx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.415880 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a84156-b48d-4a54-bc7a-21f1f3dbe78f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"espace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1211 21:48:30.994499 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 21:48:30.995721 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-720955698/tls.crt::/tmp/serving-cert-720955698/tls.key\\\\\\\"\\\\nI1211 21:48:46.498427 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 21:48:46.501459 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 21:48:46.501474 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 21:48:46.501524 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 21:48:46.501530 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 21:48:46.513453 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 21:48:46.513524 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 21:48:46.513530 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513561 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 21:48:46.513574 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 21:48:46.513582 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 21:48:46.513593 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 21:48:46.513604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 21:48:46.515407 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:30Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.427898 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ff2cb24-24cf-406e-9d30-a903feb60b46\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eade770f6dd7f56c541d4b2a9ec2ad5d321d91d5c97489fa7d3fdd2ace4dc5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://47ea175ec448102057d81855aa7b64ec0a599ba8b1d7b0a7009034e91fe3fc65\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a3dd37fc769bcf88028fae0fc5093829ad638123226cdc428172414c5e1b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.439069 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.439112 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.439123 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.439141 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.439175 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:57Z","lastTransitionTime":"2025-12-11T21:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.439470 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa81f70c3eafdcda0be8e171cf935b646b174fd7a1b22b4b464547baab4d9f37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.457912 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1e32015-9a51-44d0-be08-ecb4f246ddd1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7dc89fdda6e9653addc07f72bc62500419569ba7a115ea3396f1a07519dae349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c52482e59cae165d4851f84d13879c90ac4312289b9513badd9254a90039d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:49:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv6zh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:00Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vzpq5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:57Z is after 2025-08-24T17:21:41Z" Dec 11 
21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.482787 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e971de-68ff-47ea-9e79-306b4fb67a8f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c229454d9ab5fd483f45da51be9f8bfcf25e3b5989fd180f77141c1b706a9d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa38c7d7e09aff0be77f317dc020e473fc83549e1d9d71e89ea3a95773ed6a17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a2d4a936cd20db53f471e17e8b237fc279073bb4a2db18e816bad71651a9c21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dddf04a05f555a668de6a259bec6951c9727484c0273dc72de612db46d790f6b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dddf04a05f555a668de6a259bec6951c9727484c0273dc72de612db46d790f6b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.499201 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d1d603e74ab5964c6ee46d6a5f1dddd64b6ccec9cd967421cb5ad30e12c6197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.513201 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7q7lq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d48b9a21-a626-4b43-9429-59287cc38e3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3b5e277602e0dd0428b923d269eeeffa75946d96a67e8b9e2ae70995dc982f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nh7lf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7q7lq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.536680 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.541392 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.541457 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.541470 4956 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.541506 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.541520 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:57Z","lastTransitionTime":"2025-12-11T21:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.549626 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fgzkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534554e4-788d-4649-9dfc-ab5fd83d37d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fgzkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:57Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.644418 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.644475 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.644488 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.644505 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.644518 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:57Z","lastTransitionTime":"2025-12-11T21:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.748414 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.748471 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.748488 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.748512 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.748531 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:57Z","lastTransitionTime":"2025-12-11T21:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.876073 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.876104 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.876113 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.876128 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.876137 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:57Z","lastTransitionTime":"2025-12-11T21:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.978669 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.978716 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.978727 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.978744 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:57 crc kubenswrapper[4956]: I1211 21:49:57.978755 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:57Z","lastTransitionTime":"2025-12-11T21:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.020585 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.020586 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.020657 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.020679 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:49:58 crc kubenswrapper[4956]: E1211 21:49:58.020907 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 21:49:58 crc kubenswrapper[4956]: E1211 21:49:58.022303 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 21:49:58 crc kubenswrapper[4956]: E1211 21:49:58.022618 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 21:49:58 crc kubenswrapper[4956]: E1211 21:49:58.022924 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.035832 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac73afdfcaa87c3dca895b19d4e95e1f7d6966c69442bc076bbd2b9a6b725fdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddefe661abe8dce99cbff572ec03b95054ce35b22db6ca65564ac95f81a6273\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.048292 4956 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-fgzkb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"534554e4-788d-4649-9dfc-ab5fd83d37d9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:49:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-skp9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:49:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fgzkb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.059357 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"761cf146-79ec-47ec-9469-6776486c3178\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f544cad46d1313a6b2238591bbae146825c7360a076c95bce0ab7eb3bdafa848\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f0f7117e7d88af95b276a49af38ab2400b99d33b9954f9fc50f9fdd5699954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78f0f7117e7d88af95b276a49af38ab2400b99d33b9954f9fc50f9fdd5699954\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.070715 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:46Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.081478 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.081515 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.081525 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.081542 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.081553 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:58Z","lastTransitionTime":"2025-12-11T21:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.086631 4956 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"59601647-5a77-4d78-9821-73873f2cec46\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T21:48:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c1dcc64c988fc4c0aa012688bd5870f4e8c0b6198e6587dad96d118c617900e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4ab9c3b3db439278207bc04e3cf7e9ac147d2b9d8519907509253c2173367d2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06a68cde552c238e5afa754b1f01766531218cdaf734333ac1d535110ec0a4d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4ca7db28662461cb0fe17db730e0450668e4fc4311fa73f28c944dde1c12cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c038e720c80d70dd9289c2d2cd1b47cf30b1197070e7c0ced369bef38a34554\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c0ac9e39f87b7f165bec5af647c45cf6b1ee9cb888ae69221506ba07d6c0e22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f566effc9caeecdf78f1fa5d865d23f037c69996086fbbb7a77d80e7873b579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T21:48:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T21:48:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5mktk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T21:48:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-tmhkw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T21:49:58Z is after 2025-08-24T17:21:41Z" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.141146 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" podStartSLOduration=71.14113015 podStartE2EDuration="1m11.14113015s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:49:58.124286564 +0000 UTC m=+90.568664734" watchObservedRunningTime="2025-12-11 21:49:58.14113015 +0000 UTC m=+90.585508300" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.141373 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-p8slf" podStartSLOduration=71.141369806 podStartE2EDuration="1m11.141369806s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:49:58.14075899 +0000 UTC m=+90.585137150" watchObservedRunningTime="2025-12-11 21:49:58.141369806 +0000 UTC m=+90.585747956" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.182944 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=69.182924627 podStartE2EDuration="1m9.182924627s" podCreationTimestamp="2025-12-11 21:48:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:49:58.182836495 +0000 UTC m=+90.627214655" watchObservedRunningTime="2025-12-11 21:49:58.182924627 +0000 UTC m=+90.627302777" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.183144 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=71.183138993 podStartE2EDuration="1m11.183138993s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:49:58.163859672 +0000 UTC m=+90.608237832" watchObservedRunningTime="2025-12-11 21:49:58.183138993 +0000 UTC m=+90.627517143" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.184314 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.184352 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.184365 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.184382 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.184393 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:58Z","lastTransitionTime":"2025-12-11T21:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.249713 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podStartSLOduration=71.249688176 podStartE2EDuration="1m11.249688176s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:49:58.246429699 +0000 UTC m=+90.690807889" watchObservedRunningTime="2025-12-11 21:49:58.249688176 +0000 UTC m=+90.694066346" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.281393 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-cz2dx" podStartSLOduration=72.281371785 podStartE2EDuration="1m12.281371785s" podCreationTimestamp="2025-12-11 21:48:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:49:58.266006598 +0000 UTC m=+90.710384748" watchObservedRunningTime="2025-12-11 21:49:58.281371785 +0000 UTC m=+90.725749935" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.281493 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vzpq5" podStartSLOduration=71.281487968 podStartE2EDuration="1m11.281487968s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:49:58.280881452 +0000 UTC m=+90.725259612" watchObservedRunningTime="2025-12-11 21:49:58.281487968 +0000 UTC m=+90.725866118" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.286593 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.286740 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.286881 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.287004 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.287135 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:58Z","lastTransitionTime":"2025-12-11T21:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.296487 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=41.296474675 podStartE2EDuration="41.296474675s" podCreationTimestamp="2025-12-11 21:49:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:49:58.295510399 +0000 UTC m=+90.739888559" watchObservedRunningTime="2025-12-11 21:49:58.296474675 +0000 UTC m=+90.740852835" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.389740 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.389830 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.389843 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.389862 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.389875 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:58Z","lastTransitionTime":"2025-12-11T21:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.491500 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.491541 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.491552 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.491566 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.491577 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:58Z","lastTransitionTime":"2025-12-11T21:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.539491 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-7q7lq" podStartSLOduration=72.539472663 podStartE2EDuration="1m12.539472663s" podCreationTimestamp="2025-12-11 21:48:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:49:58.341446876 +0000 UTC m=+90.785825046" watchObservedRunningTime="2025-12-11 21:49:58.539472663 +0000 UTC m=+90.983850813"
Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.540062 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-fgzkb"]
Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.540161 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb"
Dec 11 21:49:58 crc kubenswrapper[4956]: E1211 21:49:58.540253 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9"
Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.593740 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.593810 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.593823 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.593841 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.593853 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:58Z","lastTransitionTime":"2025-12-11T21:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.695792 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.695839 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.695850 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.695870 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.695881 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:58Z","lastTransitionTime":"2025-12-11T21:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.798534 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.798575 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.798588 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.798604 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.798616 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:58Z","lastTransitionTime":"2025-12-11T21:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.901646 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.901715 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.901738 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.901797 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:58 crc kubenswrapper[4956]: I1211 21:49:58.901816 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:58Z","lastTransitionTime":"2025-12-11T21:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
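Every one of these heartbeats repeats the same gate: the runtime reports NetworkReady=false until a CNI configuration file appears in /etc/kubernetes/cni/net.d/ (on this cluster that file is written once the OVN-Kubernetes pods come up). A rough sketch of the directory check implied by the message, assuming the .conf/.conflist/.json extensions libcni conventionally loads:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cniConfigPresent reports whether any CNI config file exists in dir,
// mirroring the readiness gate behind "no CNI configuration file in
// /etc/kubernetes/cni/net.d/". Illustrative only.
func cniConfigPresent(dir string) bool {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // assumed accepted extensions
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(cniConfigPresent("/etc/kubernetes/cni/net.d"))
}
```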
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.005096 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.005166 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.005184 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.005214 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.005232 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:59Z","lastTransitionTime":"2025-12-11T21:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.107998 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.108039 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.108050 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.108069 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.108081 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:59Z","lastTransitionTime":"2025-12-11T21:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.210850 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.210903 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.210911 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.210925 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.210935 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:59Z","lastTransitionTime":"2025-12-11T21:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.313575 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.313606 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.313614 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.313627 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.313636 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:59Z","lastTransitionTime":"2025-12-11T21:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.416502 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.416587 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.416600 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.416617 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.416629 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:59Z","lastTransitionTime":"2025-12-11T21:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.519413 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.519456 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.519468 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.519483 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.519496 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:59Z","lastTransitionTime":"2025-12-11T21:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.621610 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.621664 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.621678 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.621696 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.621709 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:59Z","lastTransitionTime":"2025-12-11T21:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.724575 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.724618 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.724628 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.724642 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.724654 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:59Z","lastTransitionTime":"2025-12-11T21:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.828336 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.828401 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.828419 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.828451 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.828476 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:59Z","lastTransitionTime":"2025-12-11T21:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.931593 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.931647 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.931657 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.931675 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:49:59 crc kubenswrapper[4956]: I1211 21:49:59.931686 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:49:59Z","lastTransitionTime":"2025-12-11T21:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.020948 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.020979 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.021130 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 11 21:50:00 crc kubenswrapper[4956]: E1211 21:50:00.021220 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.021239 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 11 21:50:00 crc kubenswrapper[4956]: E1211 21:50:00.021982 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 11 21:50:00 crc kubenswrapper[4956]: E1211 21:50:00.022534 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
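The condition object the setters.go lines keep re-emitting is visible from the API side as the node's Ready condition. A sketch using client-go (assumes a reachable kubeconfig at the default path) that polls node crc until the condition flips, the transition recorded just below:

```go
package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)
	for {
		node, err := cs.CoreV1().Nodes().Get(context.TODO(), "crc", metav1.GetOptions{})
		if err != nil {
			panic(err)
		}
		for _, c := range node.Status.Conditions {
			if c.Type == corev1.NodeReady {
				fmt.Printf("Ready=%s reason=%s msg=%s\n", c.Status, c.Reason, c.Message)
				if c.Status == corev1.ConditionTrue {
					return // matches the NodeReady event at 21:50:00 below
				}
			}
		}
		time.Sleep(2 * time.Second)
	}
}
```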
Dec 11 21:50:00 crc kubenswrapper[4956]: E1211 21:50:00.023305 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fgzkb" podUID="534554e4-788d-4649-9dfc-ab5fd83d37d9"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.034173 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.034227 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.034250 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.034277 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.034297 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:50:00Z","lastTransitionTime":"2025-12-11T21:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.136985 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.137061 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.137081 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.137109 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.137128 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:50:00Z","lastTransitionTime":"2025-12-11T21:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.240323 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.240380 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.240391 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.240407 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.240421 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:50:00Z","lastTransitionTime":"2025-12-11T21:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.342912 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.342965 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.342975 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.342991 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.343001 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:50:00Z","lastTransitionTime":"2025-12-11T21:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.445523 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.445590 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.445607 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.445644 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.445660 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:50:00Z","lastTransitionTime":"2025-12-11T21:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.548148 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.548189 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.548203 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.548218 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.548228 4956 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T21:50:00Z","lastTransitionTime":"2025-12-11T21:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.651450 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.651504 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.651523 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.651539 4956 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.651649 4956 kubelet_node_status.go:538] "Fast updating node status as it just became ready"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.708630 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5k9nv"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.708991 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-zvhmh"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.709183 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-4mlhz"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.709459 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-4mlhz"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.713033 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5k9nv"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.713408 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh"
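With the node Ready, the scheduler binds its backlog of control-plane pods and the kubelet's API pod source starts delivering them, hence the burst of "SyncLoop ADD" entries that follows. That source is scoped by a field selector on spec.nodeName; a self-contained sketch of the equivalent query (same kubeconfig assumption as above):

```go
package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)
	// Same shape of query as the kubelet's API pod source: only pods
	// bound to this node.
	pods, err := cs.CoreV1().Pods("").List(context.TODO(),
		metav1.ListOptions{FieldSelector: "spec.nodeName=crc"})
	if err != nil {
		panic(err)
	}
	for _, p := range pods.Items {
		fmt.Println(p.Namespace + "/" + p.Name)
	}
}
```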
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.717225 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.717567 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-ztk92"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.717809 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-8tg54"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.718095 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8tg54"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.718338 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-ztk92"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.718954 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.724543 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wm8v7"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.724957 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-vzvmj"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.725342 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2mg62"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.726079 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wm8v7"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.726537 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vzvmj"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.728286 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.729944 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.730474 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.730935 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.731202 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.731602 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.731928 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.732022 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-dkst5"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.732436 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9dkxr"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.732639 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-f4wrs"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.741359 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.741650 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.742110 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.742337 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.742644 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.742912 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.743456 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.744167 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.745280 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
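Each "Caches populated" line above is a dedicated single-object reflector: the kubelet watches exactly the ConfigMaps and Secrets that pods bound to it mount, scoped by namespace and metadata.name. A sketch of one such watch with client-go informers (the object names are taken from the entries above; this is illustrative wiring, not the kubelet's internal code):

```go
package main

import (
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/cache"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)
	// Watch a single ConfigMap: "config" in openshift-controller-manager,
	// as in the first reflector entry above.
	factory := informers.NewSharedInformerFactoryWithOptions(cs, 0,
		informers.WithNamespace("openshift-controller-manager"),
		informers.WithTweakListOptions(func(o *metav1.ListOptions) {
			o.FieldSelector = "metadata.name=config"
		}))
	inf := factory.Core().V1().ConfigMaps().Informer()
	stop := make(chan struct{})
	factory.Start(stop)
	// The sync completing is the moment a "Caches populated" line appears.
	cache.WaitForCacheSync(stop, inf.HasSynced)
}
```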
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.745360 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2mg62"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.755143 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.755286 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.755332 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.755466 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.755542 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.755552 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.755747 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.755938 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-dkst5"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.755995 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.756238 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.756553 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9dkxr"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.756898 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.757334 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.758452 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-9g9fs"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.758684 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-jzbdh"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.759014 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-b28md"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.759313 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-hnrtk"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.759360 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.759621 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.759827 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-9g9fs"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.759852 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jzbdh"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.759929 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.760046 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.760250 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b28md"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.760272 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.760562 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.760806 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-f4wrs"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.762304 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.762865 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.763789 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.763897 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hq2jl"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.763930 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.764174 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.765809 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.766465 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.766615 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.766726 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.766857 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.767008 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.767186 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.767217 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-hsjmq"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.767601 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-fbpg7"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.770933 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-hsjmq"
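The recurring "No sandbox for pod can be found. Need to start a new one" means the kubelet will ask the container runtime for a fresh pod sandbox over CRI. A heavily simplified sketch of that call against CRI-O's socket (uses k8s.io/cri-api; the socket path and the empty PodSandboxConfig are assumptions for illustration, and a real call must populate the config):

```go
package main

import (
	"context"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
	runtimeapi "k8s.io/cri-api/pkg/apis/runtime/v1"
)

func main() {
	// CRI-O's conventional socket path on this host; an assumption.
	conn, err := grpc.Dial("unix:///var/run/crio/crio.sock",
		grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		panic(err)
	}
	defer conn.Close()
	rt := runtimeapi.NewRuntimeServiceClient(conn)
	// A real request needs a fully populated PodSandboxConfig (metadata,
	// DNS config, linux security context); elided here, so the runtime
	// would reject this as-is.
	_, err = rt.RunPodSandbox(context.TODO(), &runtimeapi.RunPodSandboxRequest{})
	if err != nil {
		panic(err)
	}
}
```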
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.771091 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-nrqrs"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.771401 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-z6m7g"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.771663 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-zrrsn"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.771711 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.771878 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.775422 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-fbpg7"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.776097 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.776173 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.776905 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.777955 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-nrqrs"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.778258 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.778316 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-z6m7g"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.782458 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-9m9dx"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.782970 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-4lmpq"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.798793 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7pmjk"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.799285 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.800587 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-d8mkh"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.800683 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.801079 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.801525 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.802135 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-zrrsn"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.803484 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7pmjk"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.816416 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.816628 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.818835 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.820445 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.820667 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.820912 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.821089 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9m9dx"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.821417 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-4lmpq"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.821506 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.821746 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.821886 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.822024 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.822195 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.822290 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-56fs7"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.822931 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8r689"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.822338 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.823242 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.823279 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.822377 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.822442 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.823445 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.823449 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-56fs7"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.822504 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.822544 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.823548 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.822668 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.822793 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.823741 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.822924 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.822945 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-d8mkh"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.823847 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.823904 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.822979 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.823007 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.823095 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.823160 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.824071 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.823191 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.822751 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.824222 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.824308 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.824418 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.824591 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lrprk"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.824940 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8r689"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.825105 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-j8hd5"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.825450 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wnxdh"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.825808 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lrprk"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.824635 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.825809 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5k9nv"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.825956 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-4mlhz"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.825971 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6f6vb"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.826195 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-j8hd5"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.826445 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6f6vb"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.825312 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.825376 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.825407 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.825431 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.825599 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.828337 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.828808 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424825-fsrlm"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.829355 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424825-fsrlm"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.829714 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wnxdh"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.831291 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.831400 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.831549 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.831592 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.831804 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.833278 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.837405 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-7fvb4"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.838655 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n77qx"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.839416 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n77qx"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.839552 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7fvb4"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.840014 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.852255 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.852959 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6pn79"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.853922 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.855143 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.862561 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.863082 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-jkrgw"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.863307 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6pn79"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.864184 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9b85x"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.864258 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.864329 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-jkrgw"
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.864548 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d"]
Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.864623 4956 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9b85x" Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.865209 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-zvhmh"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.867139 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-ztk92"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.867161 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9dkxr"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.867172 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wm8v7"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.869030 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.869107 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-b28md"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.871236 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-6nc5z"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.872077 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hq2jl"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.872095 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-jzbdh"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.872144 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-6nc5z" Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.873365 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-gkxnv"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.874282 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-gkxnv" Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.874434 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-hsjmq"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.875347 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-z6m7g"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.876130 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.876623 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8r689"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.878005 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-vzvmj"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.878871 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-dkst5"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.879046 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2mg62"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.880089 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lrprk"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.881368 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wnxdh"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.883925 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-f4wrs"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.885439 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-4lmpq"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.886379 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-9g9fs"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.887262 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-hnrtk"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.888255 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-d8mkh"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.889342 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-nrqrs"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.890142 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-j8hd5"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.891049 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-7fvb4"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.891891 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n77qx"] Dec 11 21:50:00 crc 
kubenswrapper[4956]: I1211 21:50:00.893142 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6pn79"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.894168 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.894817 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7pmjk"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.895805 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-zrrsn"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.896908 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-7phqq"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.898533 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-56fs7"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.899912 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-7phqq" Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.899951 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6f6vb"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.901098 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-6nc5z"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.902670 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424825-fsrlm"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.903650 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-jkrgw"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.904553 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-nh95j"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.905469 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-7phqq"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.905584 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-nh95j" Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.906444 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9b85x"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.907388 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-nh95j"] Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.913903 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.933572 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.953946 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.974581 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 11 21:50:00 crc kubenswrapper[4956]: I1211 21:50:00.998727 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.014172 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.035462 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.054720 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.074065 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.094482 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.113578 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.133973 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.158243 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.173090 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.194700 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.214266 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.234991 4956 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.255019 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.274920 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.295390 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.315596 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.335498 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.356431 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.374894 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.415499 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.434631 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.453870 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.474915 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.495136 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.514132 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.534830 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.555640 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.574936 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.595238 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.614858 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.635469 4956 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.654402 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.682670 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.695048 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.714106 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.734226 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.753742 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.774272 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.794742 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.815330 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.832625 4956 request.go:700] Waited for 1.007441128s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/secrets?fieldSelector=metadata.name%3Dpackage-server-manager-serving-cert&limit=500&resourceVersion=0 Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.834414 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.854374 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.875149 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.894203 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.914717 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.934948 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.954843 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.975141 4956 reflector.go:368] Caches populated 
for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 11 21:50:01 crc kubenswrapper[4956]: I1211 21:50:01.995175 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.014476 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.021148 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.021206 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.021158 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.021331 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.037342 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.054535 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.074073 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.094400 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.114764 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.135485 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.154858 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.195011 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.215611 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.234849 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.255173 4956 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.274343 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.295197 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.314588 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.335689 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.355999 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.374831 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.395288 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.414879 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.434588 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.455117 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.474581 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.494980 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.514473 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.546293 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.555240 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.574367 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.594953 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.615375 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.635597 4956 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.655510 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.674733 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.694848 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.714584 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.735174 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.754392 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.774919 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.794016 4956 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.814903 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.833104 4956 request.go:700] Waited for 1.812098063s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-etcd/pods Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.855639 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.875958 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.895147 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.914761 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.935567 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.946230 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmtqq\" (UniqueName: \"kubernetes.io/projected/06f990d2-0043-4e1c-9a1d-34c70bc123d4-kube-api-access-bmtqq\") pod \"openshift-controller-manager-operator-756b6f6bc6-5k9nv\" (UID: \"06f990d2-0043-4e1c-9a1d-34c70bc123d4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5k9nv" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.946469 4956 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b1e309e-6542-43b9-95cc-3197be39a203-serving-cert\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.946637 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/364d355b-96bc-4ce2-9734-6758414934fc-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-8tg54\" (UID: \"364d355b-96bc-4ce2-9734-6758414934fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8tg54" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.946849 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fe381376-cd51-4565-be0a-1fd8a77be7ac-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-6rtsw\" (UID: \"fe381376-cd51-4565-be0a-1fd8a77be7ac\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.947041 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/62802da2-70ad-46d2-bc51-b9bf3e0b6086-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-d8mkh\" (UID: \"62802da2-70ad-46d2-bc51-b9bf3e0b6086\") " pod="openshift-marketplace/marketplace-operator-79b997595-d8mkh" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.947289 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjdzt\" (UniqueName: \"kubernetes.io/projected/1f36b124-c397-4935-82b6-191d83292d1b-kube-api-access-zjdzt\") pod \"router-default-5444994796-fbpg7\" (UID: \"1f36b124-c397-4935-82b6-191d83292d1b\") " pod="openshift-ingress/router-default-5444994796-fbpg7" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.947622 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwhrz\" (UniqueName: \"kubernetes.io/projected/2ca85300-63e6-412c-917b-df0c8696dfda-kube-api-access-hwhrz\") pod \"multus-admission-controller-857f4d67dd-4lmpq\" (UID: \"2ca85300-63e6-412c-917b-df0c8696dfda\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-4lmpq" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.947897 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/df97aa52-8dc9-46d3-932e-545b1c736c9b-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-wm8v7\" (UID: \"df97aa52-8dc9-46d3-932e-545b1c736c9b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wm8v7" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.948098 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trnxc\" (UniqueName: \"kubernetes.io/projected/78d9268c-ae46-4117-8674-2a7d107831bd-kube-api-access-trnxc\") pod \"dns-operator-744455d44c-hsjmq\" (UID: \"78d9268c-ae46-4117-8674-2a7d107831bd\") " pod="openshift-dns-operator/dns-operator-744455d44c-hsjmq" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.948260 4956 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/99b3c2ef-0c86-427c-9c97-f4a9221b69b1-images\") pod \"machine-api-operator-5694c8668f-dkst5\" (UID: \"99b3c2ef-0c86-427c-9c97-f4a9221b69b1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dkst5" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.948448 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d3311546-a763-4f88-87c2-ea9dc6c5d023-metrics-tls\") pod \"ingress-operator-5b745b69d9-b28md\" (UID: \"d3311546-a763-4f88-87c2-ea9dc6c5d023\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b28md" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.948688 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tzzf\" (UniqueName: \"kubernetes.io/projected/1cd84235-0b8b-43a0-8d10-6324b5759eac-kube-api-access-7tzzf\") pod \"packageserver-d55dfcdfc-z6m7g\" (UID: \"1cd84235-0b8b-43a0-8d10-6324b5759eac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-z6m7g" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.948929 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6nml\" (UniqueName: \"kubernetes.io/projected/3d0fc4e8-1c1f-4f4a-8dd2-eaf0c3f7b294-kube-api-access-s6nml\") pod \"downloads-7954f5f757-ztk92\" (UID: \"3d0fc4e8-1c1f-4f4a-8dd2-eaf0c3f7b294\") " pod="openshift-console/downloads-7954f5f757-ztk92" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.949120 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/78d9268c-ae46-4117-8674-2a7d107831bd-metrics-tls\") pod \"dns-operator-744455d44c-hsjmq\" (UID: \"78d9268c-ae46-4117-8674-2a7d107831bd\") " pod="openshift-dns-operator/dns-operator-744455d44c-hsjmq" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.949275 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b7758ba9-d0df-47cc-a703-d69e4e001adf-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-jzbdh\" (UID: \"b7758ba9-d0df-47cc-a703-d69e4e001adf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jzbdh" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.949483 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d3311546-a763-4f88-87c2-ea9dc6c5d023-bound-sa-token\") pod \"ingress-operator-5b745b69d9-b28md\" (UID: \"d3311546-a763-4f88-87c2-ea9dc6c5d023\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b28md" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.949700 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b058f1ae-0331-46f7-a1e8-339dbf9a9405-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-7pmjk\" (UID: \"b058f1ae-0331-46f7-a1e8-339dbf9a9405\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7pmjk" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.949891 4956 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/fe381376-cd51-4565-be0a-1fd8a77be7ac-audit-policies\") pod \"apiserver-7bbb656c7d-6rtsw\" (UID: \"fe381376-cd51-4565-be0a-1fd8a77be7ac\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.950095 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/364d355b-96bc-4ce2-9734-6758414934fc-service-ca\") pod \"cluster-version-operator-5c965bbfc6-8tg54\" (UID: \"364d355b-96bc-4ce2-9734-6758414934fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8tg54" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.950262 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/364d355b-96bc-4ce2-9734-6758414934fc-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-8tg54\" (UID: \"364d355b-96bc-4ce2-9734-6758414934fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8tg54" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.950412 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3b1e309e-6542-43b9-95cc-3197be39a203-encryption-config\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.950561 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1f36b124-c397-4935-82b6-191d83292d1b-metrics-certs\") pod \"router-default-5444994796-fbpg7\" (UID: \"1f36b124-c397-4935-82b6-191d83292d1b\") " pod="openshift-ingress/router-default-5444994796-fbpg7" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.950735 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06f990d2-0043-4e1c-9a1d-34c70bc123d4-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-5k9nv\" (UID: \"06f990d2-0043-4e1c-9a1d-34c70bc123d4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5k9nv" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.950936 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3b1e309e-6542-43b9-95cc-3197be39a203-trusted-ca-bundle\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.951093 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c04a25ee-01c0-4fed-b4c5-a9984606786a-serving-cert\") pod \"authentication-operator-69f744f599-9g9fs\" (UID: \"c04a25ee-01c0-4fed-b4c5-a9984606786a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9g9fs" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.951255 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0190a0a5-2358-4044-b766-f164e0124dab-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-zvhmh\" (UID: \"0190a0a5-2358-4044-b766-f164e0124dab\") " pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.951437 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pw7f\" (UniqueName: \"kubernetes.io/projected/6dae87cb-e091-408e-9b9d-4d45e7797fc5-kube-api-access-7pw7f\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.951608 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/99b3c2ef-0c86-427c-9c97-f4a9221b69b1-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-dkst5\" (UID: \"99b3c2ef-0c86-427c-9c97-f4a9221b69b1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dkst5" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.951832 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0190a0a5-2358-4044-b766-f164e0124dab-serving-cert\") pod \"controller-manager-879f6c89f-zvhmh\" (UID: \"0190a0a5-2358-4044-b766-f164e0124dab\") " pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.952025 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-registry-certificates\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.952195 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67nk8\" (UniqueName: \"kubernetes.io/projected/62552ab3-7cb2-4f75-8f3a-75d264a50f66-kube-api-access-67nk8\") pod \"openshift-config-operator-7777fb866f-vzvmj\" (UID: \"62552ab3-7cb2-4f75-8f3a-75d264a50f66\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vzvmj" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.952591 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sw7z4\" (UniqueName: \"kubernetes.io/projected/62802da2-70ad-46d2-bc51-b9bf3e0b6086-kube-api-access-sw7z4\") pod \"marketplace-operator-79b997595-d8mkh\" (UID: \"62802da2-70ad-46d2-bc51-b9bf3e0b6086\") " pod="openshift-marketplace/marketplace-operator-79b997595-d8mkh" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.952925 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1cd84235-0b8b-43a0-8d10-6324b5759eac-webhook-cert\") pod \"packageserver-d55dfcdfc-z6m7g\" (UID: \"1cd84235-0b8b-43a0-8d10-6324b5759eac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-z6m7g" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.953223 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fab9c443-9e80-4943-bfa0-0902f4377230-serving-cert\") pod \"route-controller-manager-6576b87f9c-nx88d\" (UID: \"fab9c443-9e80-4943-bfa0-0902f4377230\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.953506 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-trusted-ca\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.953837 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/fe381376-cd51-4565-be0a-1fd8a77be7ac-encryption-config\") pod \"apiserver-7bbb656c7d-6rtsw\" (UID: \"fe381376-cd51-4565-be0a-1fd8a77be7ac\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.954238 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c04a25ee-01c0-4fed-b4c5-a9984606786a-config\") pod \"authentication-operator-69f744f599-9g9fs\" (UID: \"c04a25ee-01c0-4fed-b4c5-a9984606786a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9g9fs" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.954527 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/45c3a183-07ab-4339-92b3-97eac03e9601-trusted-ca\") pod \"console-operator-58897d9998-4mlhz\" (UID: \"45c3a183-07ab-4339-92b3-97eac03e9601\") " pod="openshift-console-operator/console-operator-58897d9998-4mlhz" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.954826 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vcns7\" (UniqueName: \"kubernetes.io/projected/0190a0a5-2358-4044-b766-f164e0124dab-kube-api-access-vcns7\") pod \"controller-manager-879f6c89f-zvhmh\" (UID: \"0190a0a5-2358-4044-b766-f164e0124dab\") " pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.954283 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.955098 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.955399 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fab9c443-9e80-4943-bfa0-0902f4377230-client-ca\") pod \"route-controller-manager-6576b87f9c-nx88d\" (UID: \"fab9c443-9e80-4943-bfa0-0902f4377230\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.957671 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62552ab3-7cb2-4f75-8f3a-75d264a50f66-serving-cert\") pod \"openshift-config-operator-7777fb866f-vzvmj\" (UID: \"62552ab3-7cb2-4f75-8f3a-75d264a50f66\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vzvmj" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.958000 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/62802da2-70ad-46d2-bc51-b9bf3e0b6086-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-d8mkh\" (UID: \"62802da2-70ad-46d2-bc51-b9bf3e0b6086\") " pod="openshift-marketplace/marketplace-operator-79b997595-d8mkh" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.958407 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2ca85300-63e6-412c-917b-df0c8696dfda-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-4lmpq\" (UID: \"2ca85300-63e6-412c-917b-df0c8696dfda\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-4lmpq" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.959023 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6dae87cb-e091-408e-9b9d-4d45e7797fc5-audit-policies\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.959273 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.959541 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.959808 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b54m7\" (UniqueName: \"kubernetes.io/projected/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-kube-api-access-b54m7\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.960128 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05f4ef31-1e96-4627-ab11-cc326d624062-config\") pod \"openshift-apiserver-operator-796bbdcf4f-9dkxr\" (UID: 
\"05f4ef31-1e96-4627-ab11-cc326d624062\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9dkxr" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.960197 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5d35c7f-7d08-4c15-a193-867b0b8ea71e-serving-cert\") pod \"service-ca-operator-777779d784-nrqrs\" (UID: \"e5d35c7f-7d08-4c15-a193-867b0b8ea71e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nrqrs" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.960255 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ng9w\" (UniqueName: \"kubernetes.io/projected/e5d35c7f-7d08-4c15-a193-867b0b8ea71e-kube-api-access-9ng9w\") pod \"service-ca-operator-777779d784-nrqrs\" (UID: \"e5d35c7f-7d08-4c15-a193-867b0b8ea71e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nrqrs" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.960339 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8c2ab16f-cc2d-4319-ac87-974565b63c6e-auth-proxy-config\") pod \"machine-approver-56656f9798-9m9dx\" (UID: \"8c2ab16f-cc2d-4319-ac87-974565b63c6e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9m9dx" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.960378 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/364d355b-96bc-4ce2-9734-6758414934fc-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-8tg54\" (UID: \"364d355b-96bc-4ce2-9734-6758414934fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8tg54" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.960410 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-registry-tls\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.960441 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/3b1e309e-6542-43b9-95cc-3197be39a203-image-import-ca\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.960472 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d3311546-a763-4f88-87c2-ea9dc6c5d023-trusted-ca\") pod \"ingress-operator-5b745b69d9-b28md\" (UID: \"d3311546-a763-4f88-87c2-ea9dc6c5d023\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b28md" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.960507 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/fe381376-cd51-4565-be0a-1fd8a77be7ac-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-6rtsw\" (UID: \"fe381376-cd51-4565-be0a-1fd8a77be7ac\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.960571 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fab9c443-9e80-4943-bfa0-0902f4377230-config\") pod \"route-controller-manager-6576b87f9c-nx88d\" (UID: \"fab9c443-9e80-4943-bfa0-0902f4377230\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.960607 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1cd84235-0b8b-43a0-8d10-6324b5759eac-apiservice-cert\") pod \"packageserver-d55dfcdfc-z6m7g\" (UID: \"1cd84235-0b8b-43a0-8d10-6324b5759eac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-z6m7g" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.960636 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0190a0a5-2358-4044-b766-f164e0124dab-client-ca\") pod \"controller-manager-879f6c89f-zvhmh\" (UID: \"0190a0a5-2358-4044-b766-f164e0124dab\") " pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.960669 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.960714 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fe381376-cd51-4565-be0a-1fd8a77be7ac-audit-dir\") pod \"apiserver-7bbb656c7d-6rtsw\" (UID: \"fe381376-cd51-4565-be0a-1fd8a77be7ac\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.960761 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.960854 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b7758ba9-d0df-47cc-a703-d69e4e001adf-proxy-tls\") pod \"machine-config-controller-84d6567774-jzbdh\" (UID: \"b7758ba9-d0df-47cc-a703-d69e4e001adf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jzbdh" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.960896 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6dae87cb-e091-408e-9b9d-4d45e7797fc5-audit-dir\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.960946 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/364d355b-96bc-4ce2-9734-6758414934fc-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-8tg54\" (UID: \"364d355b-96bc-4ce2-9734-6758414934fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8tg54" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.960997 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjfnv\" (UniqueName: \"kubernetes.io/projected/c04a25ee-01c0-4fed-b4c5-a9984606786a-kube-api-access-sjfnv\") pod \"authentication-operator-69f744f599-9g9fs\" (UID: \"c04a25ee-01c0-4fed-b4c5-a9984606786a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9g9fs" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.961029 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe381376-cd51-4565-be0a-1fd8a77be7ac-serving-cert\") pod \"apiserver-7bbb656c7d-6rtsw\" (UID: \"fe381376-cd51-4565-be0a-1fd8a77be7ac\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.961061 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99b3c2ef-0c86-427c-9c97-f4a9221b69b1-config\") pod \"machine-api-operator-5694c8668f-dkst5\" (UID: \"99b3c2ef-0c86-427c-9c97-f4a9221b69b1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dkst5" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.961090 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b1e309e-6542-43b9-95cc-3197be39a203-config\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.961119 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d44ce400-a741-4232-897e-f9e50bc0f894-config\") pod \"etcd-operator-b45778765-zrrsn\" (UID: \"d44ce400-a741-4232-897e-f9e50bc0f894\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrrsn" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.961170 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c04a25ee-01c0-4fed-b4c5-a9984606786a-service-ca-bundle\") pod \"authentication-operator-69f744f599-9g9fs\" (UID: \"c04a25ee-01c0-4fed-b4c5-a9984606786a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9g9fs" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.961198 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/1cd84235-0b8b-43a0-8d10-6324b5759eac-tmpfs\") pod \"packageserver-d55dfcdfc-z6m7g\" (UID: \"1cd84235-0b8b-43a0-8d10-6324b5759eac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-z6m7g" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 
21:50:02.961232 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrs5w\" (UniqueName: \"kubernetes.io/projected/fe381376-cd51-4565-be0a-1fd8a77be7ac-kube-api-access-qrs5w\") pod \"apiserver-7bbb656c7d-6rtsw\" (UID: \"fe381376-cd51-4565-be0a-1fd8a77be7ac\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.961479 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.961555 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-installation-pull-secrets\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.961656 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/45c3a183-07ab-4339-92b3-97eac03e9601-serving-cert\") pod \"console-operator-58897d9998-4mlhz\" (UID: \"45c3a183-07ab-4339-92b3-97eac03e9601\") " pod="openshift-console-operator/console-operator-58897d9998-4mlhz" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.962009 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0190a0a5-2358-4044-b766-f164e0124dab-config\") pod \"controller-manager-879f6c89f-zvhmh\" (UID: \"0190a0a5-2358-4044-b766-f164e0124dab\") " pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.962112 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-ca-trust-extracted\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.962173 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.962356 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/05f4ef31-1e96-4627-ab11-cc326d624062-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-9dkxr\" (UID: \"05f4ef31-1e96-4627-ab11-cc326d624062\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9dkxr" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 
21:50:02.962519 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/1f36b124-c397-4935-82b6-191d83292d1b-stats-auth\") pod \"router-default-5444994796-fbpg7\" (UID: \"1f36b124-c397-4935-82b6-191d83292d1b\") " pod="openshift-ingress/router-default-5444994796-fbpg7" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.962572 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.962708 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/1f36b124-c397-4935-82b6-191d83292d1b-default-certificate\") pod \"router-default-5444994796-fbpg7\" (UID: \"1f36b124-c397-4935-82b6-191d83292d1b\") " pod="openshift-ingress/router-default-5444994796-fbpg7" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.962821 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2fhx\" (UniqueName: \"kubernetes.io/projected/74c4dba3-53de-449c-9360-9ec5d8a00b1d-kube-api-access-d2fhx\") pod \"cluster-samples-operator-665b6dd947-2mg62\" (UID: \"74c4dba3-53de-449c-9360-9ec5d8a00b1d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2mg62" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.962872 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3b1e309e-6542-43b9-95cc-3197be39a203-audit-dir\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.962946 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b058f1ae-0331-46f7-a1e8-339dbf9a9405-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-7pmjk\" (UID: \"b058f1ae-0331-46f7-a1e8-339dbf9a9405\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7pmjk" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.962993 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3b1e309e-6542-43b9-95cc-3197be39a203-etcd-client\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.963025 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/d44ce400-a741-4232-897e-f9e50bc0f894-etcd-ca\") pod \"etcd-operator-b45778765-zrrsn\" (UID: \"d44ce400-a741-4232-897e-f9e50bc0f894\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrrsn" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.963055 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b058f1ae-0331-46f7-a1e8-339dbf9a9405-config\") pod \"kube-apiserver-operator-766d6c64bb-7pmjk\" (UID: \"b058f1ae-0331-46f7-a1e8-339dbf9a9405\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7pmjk" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.963159 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxqr9\" (UniqueName: \"kubernetes.io/projected/d3311546-a763-4f88-87c2-ea9dc6c5d023-kube-api-access-mxqr9\") pod \"ingress-operator-5b745b69d9-b28md\" (UID: \"d3311546-a763-4f88-87c2-ea9dc6c5d023\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b28md" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.963213 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5585p\" (UniqueName: \"kubernetes.io/projected/fab9c443-9e80-4943-bfa0-0902f4377230-kube-api-access-5585p\") pod \"route-controller-manager-6576b87f9c-nx88d\" (UID: \"fab9c443-9e80-4943-bfa0-0902f4377230\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.963263 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/3b1e309e-6542-43b9-95cc-3197be39a203-audit\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.963449 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.963497 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3b1e309e-6542-43b9-95cc-3197be39a203-etcd-serving-ca\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.963566 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.963600 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45c3a183-07ab-4339-92b3-97eac03e9601-config\") pod \"console-operator-58897d9998-4mlhz\" (UID: \"45c3a183-07ab-4339-92b3-97eac03e9601\") " pod="openshift-console-operator/console-operator-58897d9998-4mlhz" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.963734 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhksf\" (UniqueName: 
\"kubernetes.io/projected/99b3c2ef-0c86-427c-9c97-f4a9221b69b1-kube-api-access-dhksf\") pod \"machine-api-operator-5694c8668f-dkst5\" (UID: \"99b3c2ef-0c86-427c-9c97-f4a9221b69b1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dkst5" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.964021 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xh2gz\" (UniqueName: \"kubernetes.io/projected/3b1e309e-6542-43b9-95cc-3197be39a203-kube-api-access-xh2gz\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.964072 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jxlq\" (UniqueName: \"kubernetes.io/projected/d44ce400-a741-4232-897e-f9e50bc0f894-kube-api-access-2jxlq\") pod \"etcd-operator-b45778765-zrrsn\" (UID: \"d44ce400-a741-4232-897e-f9e50bc0f894\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrrsn" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.964107 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/62552ab3-7cb2-4f75-8f3a-75d264a50f66-available-featuregates\") pod \"openshift-config-operator-7777fb866f-vzvmj\" (UID: \"62552ab3-7cb2-4f75-8f3a-75d264a50f66\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vzvmj" Dec 11 21:50:02 crc kubenswrapper[4956]: E1211 21:50:02.964229 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:03.464200807 +0000 UTC m=+95.908579067 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.964295 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/fe381376-cd51-4565-be0a-1fd8a77be7ac-etcd-client\") pod \"apiserver-7bbb656c7d-6rtsw\" (UID: \"fe381376-cd51-4565-be0a-1fd8a77be7ac\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.964351 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/74c4dba3-53de-449c-9360-9ec5d8a00b1d-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-2mg62\" (UID: \"74c4dba3-53de-449c-9360-9ec5d8a00b1d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2mg62" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.964450 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7lmn\" (UniqueName: \"kubernetes.io/projected/05f4ef31-1e96-4627-ab11-cc326d624062-kube-api-access-h7lmn\") pod \"openshift-apiserver-operator-796bbdcf4f-9dkxr\" (UID: \"05f4ef31-1e96-4627-ab11-cc326d624062\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9dkxr" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.964518 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/06f990d2-0043-4e1c-9a1d-34c70bc123d4-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-5k9nv\" (UID: \"06f990d2-0043-4e1c-9a1d-34c70bc123d4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5k9nv" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.964605 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d44ce400-a741-4232-897e-f9e50bc0f894-serving-cert\") pod \"etcd-operator-b45778765-zrrsn\" (UID: \"d44ce400-a741-4232-897e-f9e50bc0f894\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrrsn" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.964655 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c2ab16f-cc2d-4319-ac87-974565b63c6e-config\") pod \"machine-approver-56656f9798-9m9dx\" (UID: \"8c2ab16f-cc2d-4319-ac87-974565b63c6e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9m9dx" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.964725 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1f36b124-c397-4935-82b6-191d83292d1b-service-ca-bundle\") pod \"router-default-5444994796-fbpg7\" (UID: \"1f36b124-c397-4935-82b6-191d83292d1b\") " 
pod="openshift-ingress/router-default-5444994796-fbpg7" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.964806 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/8c2ab16f-cc2d-4319-ac87-974565b63c6e-machine-approver-tls\") pod \"machine-approver-56656f9798-9m9dx\" (UID: \"8c2ab16f-cc2d-4319-ac87-974565b63c6e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9m9dx" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.964872 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7t8j5\" (UniqueName: \"kubernetes.io/projected/df97aa52-8dc9-46d3-932e-545b1c736c9b-kube-api-access-7t8j5\") pod \"cluster-image-registry-operator-dc59b4c8b-wm8v7\" (UID: \"df97aa52-8dc9-46d3-932e-545b1c736c9b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wm8v7" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.964931 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/df97aa52-8dc9-46d3-932e-545b1c736c9b-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-wm8v7\" (UID: \"df97aa52-8dc9-46d3-932e-545b1c736c9b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wm8v7" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.965061 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-bound-sa-token\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.965108 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3b1e309e-6542-43b9-95cc-3197be39a203-node-pullsecrets\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.965156 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c04a25ee-01c0-4fed-b4c5-a9984606786a-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-9g9fs\" (UID: \"c04a25ee-01c0-4fed-b4c5-a9984606786a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9g9fs" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.965201 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/df97aa52-8dc9-46d3-932e-545b1c736c9b-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-wm8v7\" (UID: \"df97aa52-8dc9-46d3-932e-545b1c736c9b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wm8v7" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.965247 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-488sb\" (UniqueName: \"kubernetes.io/projected/8c2ab16f-cc2d-4319-ac87-974565b63c6e-kube-api-access-488sb\") pod \"machine-approver-56656f9798-9m9dx\" (UID: 
\"8c2ab16f-cc2d-4319-ac87-974565b63c6e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9m9dx" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.965298 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d44ce400-a741-4232-897e-f9e50bc0f894-etcd-client\") pod \"etcd-operator-b45778765-zrrsn\" (UID: \"d44ce400-a741-4232-897e-f9e50bc0f894\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrrsn" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.965448 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7n72g\" (UniqueName: \"kubernetes.io/projected/b7758ba9-d0df-47cc-a703-d69e4e001adf-kube-api-access-7n72g\") pod \"machine-config-controller-84d6567774-jzbdh\" (UID: \"b7758ba9-d0df-47cc-a703-d69e4e001adf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jzbdh" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.965503 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5d35c7f-7d08-4c15-a193-867b0b8ea71e-config\") pod \"service-ca-operator-777779d784-nrqrs\" (UID: \"e5d35c7f-7d08-4c15-a193-867b0b8ea71e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nrqrs" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.965561 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/d44ce400-a741-4232-897e-f9e50bc0f894-etcd-service-ca\") pod \"etcd-operator-b45778765-zrrsn\" (UID: \"d44ce400-a741-4232-897e-f9e50bc0f894\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrrsn" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.965627 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bk7vr\" (UniqueName: \"kubernetes.io/projected/45c3a183-07ab-4339-92b3-97eac03e9601-kube-api-access-bk7vr\") pod \"console-operator-58897d9998-4mlhz\" (UID: \"45c3a183-07ab-4339-92b3-97eac03e9601\") " pod="openshift-console-operator/console-operator-58897d9998-4mlhz" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.965665 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.965699 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:02 crc kubenswrapper[4956]: I1211 21:50:02.975063 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.066525 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.066718 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fe381376-cd51-4565-be0a-1fd8a77be7ac-audit-dir\") pod \"apiserver-7bbb656c7d-6rtsw\" (UID: \"fe381376-cd51-4565-be0a-1fd8a77be7ac\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:03 crc kubenswrapper[4956]: E1211 21:50:03.066824 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:03.566724573 +0000 UTC m=+96.011102753 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.066912 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.066984 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfs6w\" (UniqueName: \"kubernetes.io/projected/0e804c82-ec6a-4d08-bd18-d50942c0d985-kube-api-access-kfs6w\") pod \"csi-hostpathplugin-nh95j\" (UID: \"0e804c82-ec6a-4d08-bd18-d50942c0d985\") " pod="hostpath-provisioner/csi-hostpathplugin-nh95j" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.066919 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fe381376-cd51-4565-be0a-1fd8a77be7ac-audit-dir\") pod \"apiserver-7bbb656c7d-6rtsw\" (UID: \"fe381376-cd51-4565-be0a-1fd8a77be7ac\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.067049 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b7758ba9-d0df-47cc-a703-d69e4e001adf-proxy-tls\") pod \"machine-config-controller-84d6567774-jzbdh\" (UID: \"b7758ba9-d0df-47cc-a703-d69e4e001adf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jzbdh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.067094 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6dae87cb-e091-408e-9b9d-4d45e7797fc5-audit-dir\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.067128 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/364d355b-96bc-4ce2-9734-6758414934fc-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-8tg54\" (UID: \"364d355b-96bc-4ce2-9734-6758414934fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8tg54" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.067166 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7d63c8a9-8aed-4a38-a43c-ea5c95e6a59f-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-6pn79\" (UID: \"7d63c8a9-8aed-4a38-a43c-ea5c95e6a59f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6pn79" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.067198 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spnwm\" (UniqueName: \"kubernetes.io/projected/e2f7854d-11cc-4a65-977c-8a1570116842-kube-api-access-spnwm\") pod \"ingress-canary-7phqq\" (UID: \"e2f7854d-11cc-4a65-977c-8a1570116842\") " pod="openshift-ingress-canary/ingress-canary-7phqq" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.067235 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjfnv\" (UniqueName: \"kubernetes.io/projected/c04a25ee-01c0-4fed-b4c5-a9984606786a-kube-api-access-sjfnv\") pod \"authentication-operator-69f744f599-9g9fs\" (UID: \"c04a25ee-01c0-4fed-b4c5-a9984606786a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9g9fs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.067268 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cf15492b-35b0-42ad-a13d-540ccaa7dc23-proxy-tls\") pod \"machine-config-operator-74547568cd-56fs7\" (UID: \"cf15492b-35b0-42ad-a13d-540ccaa7dc23\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-56fs7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.067275 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/364d355b-96bc-4ce2-9734-6758414934fc-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-8tg54\" (UID: \"364d355b-96bc-4ce2-9734-6758414934fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8tg54" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.067301 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/998b11ed-322d-49b3-9a3a-79474037d6ea-trusted-ca-bundle\") pod \"console-f9d7485db-jkrgw\" (UID: \"998b11ed-322d-49b3-9a3a-79474037d6ea\") " pod="openshift-console/console-f9d7485db-jkrgw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.067334 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe381376-cd51-4565-be0a-1fd8a77be7ac-serving-cert\") pod \"apiserver-7bbb656c7d-6rtsw\" (UID: \"fe381376-cd51-4565-be0a-1fd8a77be7ac\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:03 crc 
kubenswrapper[4956]: I1211 21:50:03.067370 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/6dfd68e7-13f2-49a6-9304-a56d7fb6b2d3-node-bootstrap-token\") pod \"machine-config-server-gkxnv\" (UID: \"6dfd68e7-13f2-49a6-9304-a56d7fb6b2d3\") " pod="openshift-machine-config-operator/machine-config-server-gkxnv" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.067403 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99b3c2ef-0c86-427c-9c97-f4a9221b69b1-config\") pod \"machine-api-operator-5694c8668f-dkst5\" (UID: \"99b3c2ef-0c86-427c-9c97-f4a9221b69b1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dkst5" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.067436 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b1e309e-6542-43b9-95cc-3197be39a203-config\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.067485 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6dae87cb-e091-408e-9b9d-4d45e7797fc5-audit-dir\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.067526 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d44ce400-a741-4232-897e-f9e50bc0f894-config\") pod \"etcd-operator-b45778765-zrrsn\" (UID: \"d44ce400-a741-4232-897e-f9e50bc0f894\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrrsn" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.067718 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c53073d9-6cbf-4e29-95cb-88254007d7d4-metrics-tls\") pod \"dns-default-6nc5z\" (UID: \"c53073d9-6cbf-4e29-95cb-88254007d7d4\") " pod="openshift-dns/dns-default-6nc5z" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.067900 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/0e804c82-ec6a-4d08-bd18-d50942c0d985-mountpoint-dir\") pod \"csi-hostpathplugin-nh95j\" (UID: \"0e804c82-ec6a-4d08-bd18-d50942c0d985\") " pod="hostpath-provisioner/csi-hostpathplugin-nh95j" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.067958 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c04a25ee-01c0-4fed-b4c5-a9984606786a-service-ca-bundle\") pod \"authentication-operator-69f744f599-9g9fs\" (UID: \"c04a25ee-01c0-4fed-b4c5-a9984606786a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9g9fs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.068931 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/1cd84235-0b8b-43a0-8d10-6324b5759eac-tmpfs\") pod \"packageserver-d55dfcdfc-z6m7g\" (UID: \"1cd84235-0b8b-43a0-8d10-6324b5759eac\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-z6m7g" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.069462 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b1e309e-6542-43b9-95cc-3197be39a203-config\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.069606 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrs5w\" (UniqueName: \"kubernetes.io/projected/fe381376-cd51-4565-be0a-1fd8a77be7ac-kube-api-access-qrs5w\") pod \"apiserver-7bbb656c7d-6rtsw\" (UID: \"fe381376-cd51-4565-be0a-1fd8a77be7ac\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.069742 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.070138 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-installation-pull-secrets\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.070326 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/20336def-d6ab-4203-8957-629a61fec0a7-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-8r689\" (UID: \"20336def-d6ab-4203-8957-629a61fec0a7\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8r689" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.070507 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99b3c2ef-0c86-427c-9c97-f4a9221b69b1-config\") pod \"machine-api-operator-5694c8668f-dkst5\" (UID: \"99b3c2ef-0c86-427c-9c97-f4a9221b69b1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dkst5" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.070609 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d63c8a9-8aed-4a38-a43c-ea5c95e6a59f-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-6pn79\" (UID: \"7d63c8a9-8aed-4a38-a43c-ea5c95e6a59f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6pn79" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.071142 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d44ce400-a741-4232-897e-f9e50bc0f894-config\") pod \"etcd-operator-b45778765-zrrsn\" (UID: \"d44ce400-a741-4232-897e-f9e50bc0f894\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrrsn" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.071454 4956 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/45c3a183-07ab-4339-92b3-97eac03e9601-serving-cert\") pod \"console-operator-58897d9998-4mlhz\" (UID: \"45c3a183-07ab-4339-92b3-97eac03e9601\") " pod="openshift-console-operator/console-operator-58897d9998-4mlhz" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.071568 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/1cd84235-0b8b-43a0-8d10-6324b5759eac-tmpfs\") pod \"packageserver-d55dfcdfc-z6m7g\" (UID: \"1cd84235-0b8b-43a0-8d10-6324b5759eac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-z6m7g" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.071625 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnn9w\" (UniqueName: \"kubernetes.io/projected/ef684a3a-493f-4116-ae57-a0e732765982-kube-api-access-mnn9w\") pod \"kube-storage-version-migrator-operator-b67b599dd-6f6vb\" (UID: \"ef684a3a-493f-4116-ae57-a0e732765982\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6f6vb" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.071708 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0190a0a5-2358-4044-b766-f164e0124dab-config\") pod \"controller-manager-879f6c89f-zvhmh\" (UID: \"0190a0a5-2358-4044-b766-f164e0124dab\") " pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.071765 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ce0f76b3-4ba2-4981-a888-8f659c504f6c-profile-collector-cert\") pod \"catalog-operator-68c6474976-lrprk\" (UID: \"ce0f76b3-4ba2-4981-a888-8f659c504f6c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lrprk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.071862 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-ca-trust-extracted\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.071915 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.072016 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/05f4ef31-1e96-4627-ab11-cc326d624062-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-9dkxr\" (UID: \"05f4ef31-1e96-4627-ab11-cc326d624062\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9dkxr" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.072090 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/998b11ed-322d-49b3-9a3a-79474037d6ea-console-oauth-config\") pod \"console-f9d7485db-jkrgw\" (UID: \"998b11ed-322d-49b3-9a3a-79474037d6ea\") " pod="openshift-console/console-f9d7485db-jkrgw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.072160 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9222bc8f-bd0e-40f3-be61-07b4f951adae-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-n77qx\" (UID: \"9222bc8f-bd0e-40f3-be61-07b4f951adae\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n77qx" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.072213 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/1f36b124-c397-4935-82b6-191d83292d1b-stats-auth\") pod \"router-default-5444994796-fbpg7\" (UID: \"1f36b124-c397-4935-82b6-191d83292d1b\") " pod="openshift-ingress/router-default-5444994796-fbpg7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.072259 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.072308 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbrht\" (UniqueName: \"kubernetes.io/projected/cf15492b-35b0-42ad-a13d-540ccaa7dc23-kube-api-access-sbrht\") pod \"machine-config-operator-74547568cd-56fs7\" (UID: \"cf15492b-35b0-42ad-a13d-540ccaa7dc23\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-56fs7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.072356 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/0e804c82-ec6a-4d08-bd18-d50942c0d985-registration-dir\") pod \"csi-hostpathplugin-nh95j\" (UID: \"0e804c82-ec6a-4d08-bd18-d50942c0d985\") " pod="hostpath-provisioner/csi-hostpathplugin-nh95j" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.072407 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/1f36b124-c397-4935-82b6-191d83292d1b-default-certificate\") pod \"router-default-5444994796-fbpg7\" (UID: \"1f36b124-c397-4935-82b6-191d83292d1b\") " pod="openshift-ingress/router-default-5444994796-fbpg7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.072456 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2fhx\" (UniqueName: \"kubernetes.io/projected/74c4dba3-53de-449c-9360-9ec5d8a00b1d-kube-api-access-d2fhx\") pod \"cluster-samples-operator-665b6dd947-2mg62\" (UID: \"74c4dba3-53de-449c-9360-9ec5d8a00b1d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2mg62" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.072501 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/3b1e309e-6542-43b9-95cc-3197be39a203-audit-dir\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.072550 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b058f1ae-0331-46f7-a1e8-339dbf9a9405-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-7pmjk\" (UID: \"b058f1ae-0331-46f7-a1e8-339dbf9a9405\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7pmjk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.072626 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3b1e309e-6542-43b9-95cc-3197be39a203-etcd-client\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.072672 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/d44ce400-a741-4232-897e-f9e50bc0f894-etcd-ca\") pod \"etcd-operator-b45778765-zrrsn\" (UID: \"d44ce400-a741-4232-897e-f9e50bc0f894\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrrsn" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.072719 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b058f1ae-0331-46f7-a1e8-339dbf9a9405-config\") pod \"kube-apiserver-operator-766d6c64bb-7pmjk\" (UID: \"b058f1ae-0331-46f7-a1e8-339dbf9a9405\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7pmjk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.072766 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ce0f76b3-4ba2-4981-a888-8f659c504f6c-srv-cert\") pod \"catalog-operator-68c6474976-lrprk\" (UID: \"ce0f76b3-4ba2-4981-a888-8f659c504f6c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lrprk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.072858 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxqr9\" (UniqueName: \"kubernetes.io/projected/d3311546-a763-4f88-87c2-ea9dc6c5d023-kube-api-access-mxqr9\") pod \"ingress-operator-5b745b69d9-b28md\" (UID: \"d3311546-a763-4f88-87c2-ea9dc6c5d023\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b28md" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.072907 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5585p\" (UniqueName: \"kubernetes.io/projected/fab9c443-9e80-4943-bfa0-0902f4377230-kube-api-access-5585p\") pod \"route-controller-manager-6576b87f9c-nx88d\" (UID: \"fab9c443-9e80-4943-bfa0-0902f4377230\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.072942 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/3b1e309e-6542-43b9-95cc-3197be39a203-audit\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 
21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.072979 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073013 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3b1e309e-6542-43b9-95cc-3197be39a203-etcd-serving-ca\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073049 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/0e804c82-ec6a-4d08-bd18-d50942c0d985-plugins-dir\") pod \"csi-hostpathplugin-nh95j\" (UID: \"0e804c82-ec6a-4d08-bd18-d50942c0d985\") " pod="hostpath-provisioner/csi-hostpathplugin-nh95j" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073097 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073129 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45c3a183-07ab-4339-92b3-97eac03e9601-config\") pod \"console-operator-58897d9998-4mlhz\" (UID: \"45c3a183-07ab-4339-92b3-97eac03e9601\") " pod="openshift-console-operator/console-operator-58897d9998-4mlhz" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073161 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgwpf\" (UniqueName: \"kubernetes.io/projected/6dfd68e7-13f2-49a6-9304-a56d7fb6b2d3-kube-api-access-hgwpf\") pod \"machine-config-server-gkxnv\" (UID: \"6dfd68e7-13f2-49a6-9304-a56d7fb6b2d3\") " pod="openshift-machine-config-operator/machine-config-server-gkxnv" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073201 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jxlq\" (UniqueName: \"kubernetes.io/projected/d44ce400-a741-4232-897e-f9e50bc0f894-kube-api-access-2jxlq\") pod \"etcd-operator-b45778765-zrrsn\" (UID: \"d44ce400-a741-4232-897e-f9e50bc0f894\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrrsn" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073250 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/62552ab3-7cb2-4f75-8f3a-75d264a50f66-available-featuregates\") pod \"openshift-config-operator-7777fb866f-vzvmj\" (UID: \"62552ab3-7cb2-4f75-8f3a-75d264a50f66\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vzvmj" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073320 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-dhksf\" (UniqueName: \"kubernetes.io/projected/99b3c2ef-0c86-427c-9c97-f4a9221b69b1-kube-api-access-dhksf\") pod \"machine-api-operator-5694c8668f-dkst5\" (UID: \"99b3c2ef-0c86-427c-9c97-f4a9221b69b1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dkst5" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073355 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xh2gz\" (UniqueName: \"kubernetes.io/projected/3b1e309e-6542-43b9-95cc-3197be39a203-kube-api-access-xh2gz\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073388 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2q6l6\" (UniqueName: \"kubernetes.io/projected/0c3066a4-fecf-4608-be24-c0534bd263cc-kube-api-access-2q6l6\") pod \"service-ca-9c57cc56f-j8hd5\" (UID: \"0c3066a4-fecf-4608-be24-c0534bd263cc\") " pod="openshift-service-ca/service-ca-9c57cc56f-j8hd5" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073418 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c53073d9-6cbf-4e29-95cb-88254007d7d4-config-volume\") pod \"dns-default-6nc5z\" (UID: \"c53073d9-6cbf-4e29-95cb-88254007d7d4\") " pod="openshift-dns/dns-default-6nc5z" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073449 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/998b11ed-322d-49b3-9a3a-79474037d6ea-service-ca\") pod \"console-f9d7485db-jkrgw\" (UID: \"998b11ed-322d-49b3-9a3a-79474037d6ea\") " pod="openshift-console/console-f9d7485db-jkrgw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073484 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/74c4dba3-53de-449c-9360-9ec5d8a00b1d-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-2mg62\" (UID: \"74c4dba3-53de-449c-9360-9ec5d8a00b1d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2mg62" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073517 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7lmn\" (UniqueName: \"kubernetes.io/projected/05f4ef31-1e96-4627-ab11-cc326d624062-kube-api-access-h7lmn\") pod \"openshift-apiserver-operator-796bbdcf4f-9dkxr\" (UID: \"05f4ef31-1e96-4627-ab11-cc326d624062\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9dkxr" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073570 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/fe381376-cd51-4565-be0a-1fd8a77be7ac-etcd-client\") pod \"apiserver-7bbb656c7d-6rtsw\" (UID: \"fe381376-cd51-4565-be0a-1fd8a77be7ac\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073604 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/06f990d2-0043-4e1c-9a1d-34c70bc123d4-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-5k9nv\" (UID: 
\"06f990d2-0043-4e1c-9a1d-34c70bc123d4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5k9nv" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073637 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b39bb8fb-c84a-48cc-aa65-b992c06a090b-secret-volume\") pod \"collect-profiles-29424825-fsrlm\" (UID: \"b39bb8fb-c84a-48cc-aa65-b992c06a090b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424825-fsrlm" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073682 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/37ac4d54-fa49-4866-96c5-fcc954e9d3e6-profile-collector-cert\") pod \"olm-operator-6b444d44fb-9b85x\" (UID: \"37ac4d54-fa49-4866-96c5-fcc954e9d3e6\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9b85x" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073716 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d44ce400-a741-4232-897e-f9e50bc0f894-serving-cert\") pod \"etcd-operator-b45778765-zrrsn\" (UID: \"d44ce400-a741-4232-897e-f9e50bc0f894\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrrsn" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073749 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/0c3066a4-fecf-4608-be24-c0534bd263cc-signing-key\") pod \"service-ca-9c57cc56f-j8hd5\" (UID: \"0c3066a4-fecf-4608-be24-c0534bd263cc\") " pod="openshift-service-ca/service-ca-9c57cc56f-j8hd5" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073823 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c2ab16f-cc2d-4319-ac87-974565b63c6e-config\") pod \"machine-approver-56656f9798-9m9dx\" (UID: \"8c2ab16f-cc2d-4319-ac87-974565b63c6e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9m9dx" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073874 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cf15492b-35b0-42ad-a13d-540ccaa7dc23-auth-proxy-config\") pod \"machine-config-operator-74547568cd-56fs7\" (UID: \"cf15492b-35b0-42ad-a13d-540ccaa7dc23\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-56fs7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073922 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1f36b124-c397-4935-82b6-191d83292d1b-service-ca-bundle\") pod \"router-default-5444994796-fbpg7\" (UID: \"1f36b124-c397-4935-82b6-191d83292d1b\") " pod="openshift-ingress/router-default-5444994796-fbpg7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073956 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/8c2ab16f-cc2d-4319-ac87-974565b63c6e-machine-approver-tls\") pod \"machine-approver-56656f9798-9m9dx\" (UID: \"8c2ab16f-cc2d-4319-ac87-974565b63c6e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9m9dx" Dec 11 
21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073993 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7t8j5\" (UniqueName: \"kubernetes.io/projected/df97aa52-8dc9-46d3-932e-545b1c736c9b-kube-api-access-7t8j5\") pod \"cluster-image-registry-operator-dc59b4c8b-wm8v7\" (UID: \"df97aa52-8dc9-46d3-932e-545b1c736c9b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wm8v7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.074025 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/df97aa52-8dc9-46d3-932e-545b1c736c9b-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-wm8v7\" (UID: \"df97aa52-8dc9-46d3-932e-545b1c736c9b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wm8v7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.074058 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/df97aa52-8dc9-46d3-932e-545b1c736c9b-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-wm8v7\" (UID: \"df97aa52-8dc9-46d3-932e-545b1c736c9b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wm8v7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.074089 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/6dfd68e7-13f2-49a6-9304-a56d7fb6b2d3-certs\") pod \"machine-config-server-gkxnv\" (UID: \"6dfd68e7-13f2-49a6-9304-a56d7fb6b2d3\") " pod="openshift-machine-config-operator/machine-config-server-gkxnv" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.074124 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-bound-sa-token\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.074156 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3b1e309e-6542-43b9-95cc-3197be39a203-node-pullsecrets\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.074195 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c04a25ee-01c0-4fed-b4c5-a9984606786a-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-9g9fs\" (UID: \"c04a25ee-01c0-4fed-b4c5-a9984606786a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9g9fs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.074242 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-488sb\" (UniqueName: \"kubernetes.io/projected/8c2ab16f-cc2d-4319-ac87-974565b63c6e-kube-api-access-488sb\") pod \"machine-approver-56656f9798-9m9dx\" (UID: \"8c2ab16f-cc2d-4319-ac87-974565b63c6e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9m9dx" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.074285 4956 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d44ce400-a741-4232-897e-f9e50bc0f894-etcd-client\") pod \"etcd-operator-b45778765-zrrsn\" (UID: \"d44ce400-a741-4232-897e-f9e50bc0f894\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrrsn" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.074337 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/2d8a22dd-465c-4327-8d76-782e5d289942-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-wnxdh\" (UID: \"2d8a22dd-465c-4327-8d76-782e5d289942\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wnxdh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.074385 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9z76\" (UniqueName: \"kubernetes.io/projected/ce0f76b3-4ba2-4981-a888-8f659c504f6c-kube-api-access-j9z76\") pod \"catalog-operator-68c6474976-lrprk\" (UID: \"ce0f76b3-4ba2-4981-a888-8f659c504f6c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lrprk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.074430 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d63c8a9-8aed-4a38-a43c-ea5c95e6a59f-config\") pod \"kube-controller-manager-operator-78b949d7b-6pn79\" (UID: \"7d63c8a9-8aed-4a38-a43c-ea5c95e6a59f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6pn79" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.074475 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/d44ce400-a741-4232-897e-f9e50bc0f894-etcd-service-ca\") pod \"etcd-operator-b45778765-zrrsn\" (UID: \"d44ce400-a741-4232-897e-f9e50bc0f894\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrrsn" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.074518 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bk7vr\" (UniqueName: \"kubernetes.io/projected/45c3a183-07ab-4339-92b3-97eac03e9601-kube-api-access-bk7vr\") pod \"console-operator-58897d9998-4mlhz\" (UID: \"45c3a183-07ab-4339-92b3-97eac03e9601\") " pod="openshift-console-operator/console-operator-58897d9998-4mlhz" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.074590 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.074655 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7n72g\" (UniqueName: \"kubernetes.io/projected/b7758ba9-d0df-47cc-a703-d69e4e001adf-kube-api-access-7n72g\") pod \"machine-config-controller-84d6567774-jzbdh\" (UID: \"b7758ba9-d0df-47cc-a703-d69e4e001adf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jzbdh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.074716 4956 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5d35c7f-7d08-4c15-a193-867b0b8ea71e-config\") pod \"service-ca-operator-777779d784-nrqrs\" (UID: \"e5d35c7f-7d08-4c15-a193-867b0b8ea71e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nrqrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.074752 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0190a0a5-2358-4044-b766-f164e0124dab-config\") pod \"controller-manager-879f6c89f-zvhmh\" (UID: \"0190a0a5-2358-4044-b766-f164e0124dab\") " pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.074763 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.074851 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wllvv\" (UniqueName: \"kubernetes.io/projected/37ac4d54-fa49-4866-96c5-fcc954e9d3e6-kube-api-access-wllvv\") pod \"olm-operator-6b444d44fb-9b85x\" (UID: \"37ac4d54-fa49-4866-96c5-fcc954e9d3e6\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9b85x" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.074900 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/998b11ed-322d-49b3-9a3a-79474037d6ea-console-config\") pod \"console-f9d7485db-jkrgw\" (UID: \"998b11ed-322d-49b3-9a3a-79474037d6ea\") " pod="openshift-console/console-f9d7485db-jkrgw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.074948 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/364d355b-96bc-4ce2-9734-6758414934fc-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-8tg54\" (UID: \"364d355b-96bc-4ce2-9734-6758414934fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8tg54" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.075001 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b1e309e-6542-43b9-95cc-3197be39a203-serving-cert\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:03 crc kubenswrapper[4956]: E1211 21:50:03.075044 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:03.575023313 +0000 UTC m=+96.019401503 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.075093 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmtqq\" (UniqueName: \"kubernetes.io/projected/06f990d2-0043-4e1c-9a1d-34c70bc123d4-kube-api-access-bmtqq\") pod \"openshift-controller-manager-operator-756b6f6bc6-5k9nv\" (UID: \"06f990d2-0043-4e1c-9a1d-34c70bc123d4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5k9nv" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.075141 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef684a3a-493f-4116-ae57-a0e732765982-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-6f6vb\" (UID: \"ef684a3a-493f-4116-ae57-a0e732765982\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6f6vb" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.075179 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/df97aa52-8dc9-46d3-932e-545b1c736c9b-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-wm8v7\" (UID: \"df97aa52-8dc9-46d3-932e-545b1c736c9b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wm8v7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.075240 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/62802da2-70ad-46d2-bc51-b9bf3e0b6086-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-d8mkh\" (UID: \"62802da2-70ad-46d2-bc51-b9bf3e0b6086\") " pod="openshift-marketplace/marketplace-operator-79b997595-d8mkh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.075288 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjdzt\" (UniqueName: \"kubernetes.io/projected/1f36b124-c397-4935-82b6-191d83292d1b-kube-api-access-zjdzt\") pod \"router-default-5444994796-fbpg7\" (UID: \"1f36b124-c397-4935-82b6-191d83292d1b\") " pod="openshift-ingress/router-default-5444994796-fbpg7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.075327 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwhrz\" (UniqueName: \"kubernetes.io/projected/2ca85300-63e6-412c-917b-df0c8696dfda-kube-api-access-hwhrz\") pod \"multus-admission-controller-857f4d67dd-4lmpq\" (UID: \"2ca85300-63e6-412c-917b-df0c8696dfda\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-4lmpq" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.075364 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fe381376-cd51-4565-be0a-1fd8a77be7ac-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-6rtsw\" (UID: \"fe381376-cd51-4565-be0a-1fd8a77be7ac\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.075404 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lg4p\" (UniqueName: \"kubernetes.io/projected/c53073d9-6cbf-4e29-95cb-88254007d7d4-kube-api-access-6lg4p\") pod \"dns-default-6nc5z\" (UID: \"c53073d9-6cbf-4e29-95cb-88254007d7d4\") " pod="openshift-dns/dns-default-6nc5z" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.075445 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b39bb8fb-c84a-48cc-aa65-b992c06a090b-config-volume\") pod \"collect-profiles-29424825-fsrlm\" (UID: \"b39bb8fb-c84a-48cc-aa65-b992c06a090b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424825-fsrlm" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.075485 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trnxc\" (UniqueName: \"kubernetes.io/projected/78d9268c-ae46-4117-8674-2a7d107831bd-kube-api-access-trnxc\") pod \"dns-operator-744455d44c-hsjmq\" (UID: \"78d9268c-ae46-4117-8674-2a7d107831bd\") " pod="openshift-dns-operator/dns-operator-744455d44c-hsjmq" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.075524 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/998b11ed-322d-49b3-9a3a-79474037d6ea-console-serving-cert\") pod \"console-f9d7485db-jkrgw\" (UID: \"998b11ed-322d-49b3-9a3a-79474037d6ea\") " pod="openshift-console/console-f9d7485db-jkrgw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.075557 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9222bc8f-bd0e-40f3-be61-07b4f951adae-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-n77qx\" (UID: \"9222bc8f-bd0e-40f3-be61-07b4f951adae\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n77qx" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.075593 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/99b3c2ef-0c86-427c-9c97-f4a9221b69b1-images\") pod \"machine-api-operator-5694c8668f-dkst5\" (UID: \"99b3c2ef-0c86-427c-9c97-f4a9221b69b1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dkst5" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.075627 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d3311546-a763-4f88-87c2-ea9dc6c5d023-metrics-tls\") pod \"ingress-operator-5b745b69d9-b28md\" (UID: \"d3311546-a763-4f88-87c2-ea9dc6c5d023\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b28md" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.075662 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9222bc8f-bd0e-40f3-be61-07b4f951adae-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-n77qx\" (UID: \"9222bc8f-bd0e-40f3-be61-07b4f951adae\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n77qx" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.075695 4956 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ef684a3a-493f-4116-ae57-a0e732765982-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-6f6vb\" (UID: \"ef684a3a-493f-4116-ae57-a0e732765982\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6f6vb" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.075766 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tzzf\" (UniqueName: \"kubernetes.io/projected/1cd84235-0b8b-43a0-8d10-6324b5759eac-kube-api-access-7tzzf\") pod \"packageserver-d55dfcdfc-z6m7g\" (UID: \"1cd84235-0b8b-43a0-8d10-6324b5759eac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-z6m7g" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.075835 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6nml\" (UniqueName: \"kubernetes.io/projected/3d0fc4e8-1c1f-4f4a-8dd2-eaf0c3f7b294-kube-api-access-s6nml\") pod \"downloads-7954f5f757-ztk92\" (UID: \"3d0fc4e8-1c1f-4f4a-8dd2-eaf0c3f7b294\") " pod="openshift-console/downloads-7954f5f757-ztk92" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.075869 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nl2dd\" (UniqueName: \"kubernetes.io/projected/20336def-d6ab-4203-8957-629a61fec0a7-kube-api-access-nl2dd\") pod \"package-server-manager-789f6589d5-8r689\" (UID: \"20336def-d6ab-4203-8957-629a61fec0a7\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8r689" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.075908 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7nmv\" (UniqueName: \"kubernetes.io/projected/2d03e7cc-e66c-4be7-a167-6e8619011299-kube-api-access-v7nmv\") pod \"migrator-59844c95c7-7fvb4\" (UID: \"2d03e7cc-e66c-4be7-a167-6e8619011299\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7fvb4" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.075942 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/78d9268c-ae46-4117-8674-2a7d107831bd-metrics-tls\") pod \"dns-operator-744455d44c-hsjmq\" (UID: \"78d9268c-ae46-4117-8674-2a7d107831bd\") " pod="openshift-dns-operator/dns-operator-744455d44c-hsjmq" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.075980 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b7758ba9-d0df-47cc-a703-d69e4e001adf-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-jzbdh\" (UID: \"b7758ba9-d0df-47cc-a703-d69e4e001adf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jzbdh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.076014 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d3311546-a763-4f88-87c2-ea9dc6c5d023-bound-sa-token\") pod \"ingress-operator-5b745b69d9-b28md\" (UID: \"d3311546-a763-4f88-87c2-ea9dc6c5d023\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b28md" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.076049 4956 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b058f1ae-0331-46f7-a1e8-339dbf9a9405-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-7pmjk\" (UID: \"b058f1ae-0331-46f7-a1e8-339dbf9a9405\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7pmjk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.076129 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/37ac4d54-fa49-4866-96c5-fcc954e9d3e6-srv-cert\") pod \"olm-operator-6b444d44fb-9b85x\" (UID: \"37ac4d54-fa49-4866-96c5-fcc954e9d3e6\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9b85x" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.076170 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/fe381376-cd51-4565-be0a-1fd8a77be7ac-audit-policies\") pod \"apiserver-7bbb656c7d-6rtsw\" (UID: \"fe381376-cd51-4565-be0a-1fd8a77be7ac\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.076207 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/364d355b-96bc-4ce2-9734-6758414934fc-service-ca\") pod \"cluster-version-operator-5c965bbfc6-8tg54\" (UID: \"364d355b-96bc-4ce2-9734-6758414934fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8tg54" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.076252 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/364d355b-96bc-4ce2-9734-6758414934fc-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-8tg54\" (UID: \"364d355b-96bc-4ce2-9734-6758414934fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8tg54" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.076289 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cf15492b-35b0-42ad-a13d-540ccaa7dc23-images\") pod \"machine-config-operator-74547568cd-56fs7\" (UID: \"cf15492b-35b0-42ad-a13d-540ccaa7dc23\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-56fs7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.076325 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/0e804c82-ec6a-4d08-bd18-d50942c0d985-socket-dir\") pod \"csi-hostpathplugin-nh95j\" (UID: \"0e804c82-ec6a-4d08-bd18-d50942c0d985\") " pod="hostpath-provisioner/csi-hostpathplugin-nh95j" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.076362 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1f36b124-c397-4935-82b6-191d83292d1b-metrics-certs\") pod \"router-default-5444994796-fbpg7\" (UID: \"1f36b124-c397-4935-82b6-191d83292d1b\") " pod="openshift-ingress/router-default-5444994796-fbpg7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.076404 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3b1e309e-6542-43b9-95cc-3197be39a203-encryption-config\") pod \"apiserver-76f77b778f-f4wrs\" 
(UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.076439 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06f990d2-0043-4e1c-9a1d-34c70bc123d4-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-5k9nv\" (UID: \"06f990d2-0043-4e1c-9a1d-34c70bc123d4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5k9nv" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.076473 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0190a0a5-2358-4044-b766-f164e0124dab-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-zvhmh\" (UID: \"0190a0a5-2358-4044-b766-f164e0124dab\") " pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.076509 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pw7f\" (UniqueName: \"kubernetes.io/projected/6dae87cb-e091-408e-9b9d-4d45e7797fc5-kube-api-access-7pw7f\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.076543 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/0c3066a4-fecf-4608-be24-c0534bd263cc-signing-cabundle\") pod \"service-ca-9c57cc56f-j8hd5\" (UID: \"0c3066a4-fecf-4608-be24-c0534bd263cc\") " pod="openshift-service-ca/service-ca-9c57cc56f-j8hd5" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.076580 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3b1e309e-6542-43b9-95cc-3197be39a203-trusted-ca-bundle\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.076690 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c04a25ee-01c0-4fed-b4c5-a9984606786a-serving-cert\") pod \"authentication-operator-69f744f599-9g9fs\" (UID: \"c04a25ee-01c0-4fed-b4c5-a9984606786a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9g9fs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.076733 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/99b3c2ef-0c86-427c-9c97-f4a9221b69b1-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-dkst5\" (UID: \"99b3c2ef-0c86-427c-9c97-f4a9221b69b1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dkst5" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.076797 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0190a0a5-2358-4044-b766-f164e0124dab-serving-cert\") pod \"controller-manager-879f6c89f-zvhmh\" (UID: \"0190a0a5-2358-4044-b766-f164e0124dab\") " pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 
21:50:03.076841 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-registry-certificates\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.076876 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sw7z4\" (UniqueName: \"kubernetes.io/projected/62802da2-70ad-46d2-bc51-b9bf3e0b6086-kube-api-access-sw7z4\") pod \"marketplace-operator-79b997595-d8mkh\" (UID: \"62802da2-70ad-46d2-bc51-b9bf3e0b6086\") " pod="openshift-marketplace/marketplace-operator-79b997595-d8mkh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.076910 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1cd84235-0b8b-43a0-8d10-6324b5759eac-webhook-cert\") pod \"packageserver-d55dfcdfc-z6m7g\" (UID: \"1cd84235-0b8b-43a0-8d10-6324b5759eac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-z6m7g" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.076950 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67nk8\" (UniqueName: \"kubernetes.io/projected/62552ab3-7cb2-4f75-8f3a-75d264a50f66-kube-api-access-67nk8\") pod \"openshift-config-operator-7777fb866f-vzvmj\" (UID: \"62552ab3-7cb2-4f75-8f3a-75d264a50f66\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vzvmj" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.076984 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/998b11ed-322d-49b3-9a3a-79474037d6ea-oauth-serving-cert\") pod \"console-f9d7485db-jkrgw\" (UID: \"998b11ed-322d-49b3-9a3a-79474037d6ea\") " pod="openshift-console/console-f9d7485db-jkrgw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.077037 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fab9c443-9e80-4943-bfa0-0902f4377230-serving-cert\") pod \"route-controller-manager-6576b87f9c-nx88d\" (UID: \"fab9c443-9e80-4943-bfa0-0902f4377230\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.077071 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e2f7854d-11cc-4a65-977c-8a1570116842-cert\") pod \"ingress-canary-7phqq\" (UID: \"e2f7854d-11cc-4a65-977c-8a1570116842\") " pod="openshift-ingress-canary/ingress-canary-7phqq" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.077106 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-trusted-ca\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.077140 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/fe381376-cd51-4565-be0a-1fd8a77be7ac-encryption-config\") pod \"apiserver-7bbb656c7d-6rtsw\" (UID: \"fe381376-cd51-4565-be0a-1fd8a77be7ac\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.077175 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c04a25ee-01c0-4fed-b4c5-a9984606786a-config\") pod \"authentication-operator-69f744f599-9g9fs\" (UID: \"c04a25ee-01c0-4fed-b4c5-a9984606786a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9g9fs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.077231 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/45c3a183-07ab-4339-92b3-97eac03e9601-trusted-ca\") pod \"console-operator-58897d9998-4mlhz\" (UID: \"45c3a183-07ab-4339-92b3-97eac03e9601\") " pod="openshift-console-operator/console-operator-58897d9998-4mlhz" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.077267 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vcns7\" (UniqueName: \"kubernetes.io/projected/0190a0a5-2358-4044-b766-f164e0124dab-kube-api-access-vcns7\") pod \"controller-manager-879f6c89f-zvhmh\" (UID: \"0190a0a5-2358-4044-b766-f164e0124dab\") " pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.077304 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.077342 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fab9c443-9e80-4943-bfa0-0902f4377230-client-ca\") pod \"route-controller-manager-6576b87f9c-nx88d\" (UID: \"fab9c443-9e80-4943-bfa0-0902f4377230\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.077346 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c2ab16f-cc2d-4319-ac87-974565b63c6e-config\") pod \"machine-approver-56656f9798-9m9dx\" (UID: \"8c2ab16f-cc2d-4319-ac87-974565b63c6e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9m9dx" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.077375 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62552ab3-7cb2-4f75-8f3a-75d264a50f66-serving-cert\") pod \"openshift-config-operator-7777fb866f-vzvmj\" (UID: \"62552ab3-7cb2-4f75-8f3a-75d264a50f66\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vzvmj" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.072868 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c04a25ee-01c0-4fed-b4c5-a9984606786a-service-ca-bundle\") pod \"authentication-operator-69f744f599-9g9fs\" (UID: \"c04a25ee-01c0-4fed-b4c5-a9984606786a\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-9g9fs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.077415 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/62802da2-70ad-46d2-bc51-b9bf3e0b6086-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-d8mkh\" (UID: \"62802da2-70ad-46d2-bc51-b9bf3e0b6086\") " pod="openshift-marketplace/marketplace-operator-79b997595-d8mkh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.077469 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2ca85300-63e6-412c-917b-df0c8696dfda-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-4lmpq\" (UID: \"2ca85300-63e6-412c-917b-df0c8696dfda\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-4lmpq" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.077505 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6dae87cb-e091-408e-9b9d-4d45e7797fc5-audit-policies\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.077540 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.077577 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/0e804c82-ec6a-4d08-bd18-d50942c0d985-csi-data-dir\") pod \"csi-hostpathplugin-nh95j\" (UID: \"0e804c82-ec6a-4d08-bd18-d50942c0d985\") " pod="hostpath-provisioner/csi-hostpathplugin-nh95j" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.077612 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvnsf\" (UniqueName: \"kubernetes.io/projected/998b11ed-322d-49b3-9a3a-79474037d6ea-kube-api-access-fvnsf\") pod \"console-f9d7485db-jkrgw\" (UID: \"998b11ed-322d-49b3-9a3a-79474037d6ea\") " pod="openshift-console/console-f9d7485db-jkrgw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.077652 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.077690 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b54m7\" (UniqueName: \"kubernetes.io/projected/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-kube-api-access-b54m7\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.077727 4956 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ng9w\" (UniqueName: \"kubernetes.io/projected/e5d35c7f-7d08-4c15-a193-867b0b8ea71e-kube-api-access-9ng9w\") pod \"service-ca-operator-777779d784-nrqrs\" (UID: \"e5d35c7f-7d08-4c15-a193-867b0b8ea71e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nrqrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.077762 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8c2ab16f-cc2d-4319-ac87-974565b63c6e-auth-proxy-config\") pod \"machine-approver-56656f9798-9m9dx\" (UID: \"8c2ab16f-cc2d-4319-ac87-974565b63c6e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9m9dx" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.077888 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/364d355b-96bc-4ce2-9734-6758414934fc-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-8tg54\" (UID: \"364d355b-96bc-4ce2-9734-6758414934fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8tg54" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.077927 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05f4ef31-1e96-4627-ab11-cc326d624062-config\") pod \"openshift-apiserver-operator-796bbdcf4f-9dkxr\" (UID: \"05f4ef31-1e96-4627-ab11-cc326d624062\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9dkxr" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.077962 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5d35c7f-7d08-4c15-a193-867b0b8ea71e-serving-cert\") pod \"service-ca-operator-777779d784-nrqrs\" (UID: \"e5d35c7f-7d08-4c15-a193-867b0b8ea71e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nrqrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.078003 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtchc\" (UniqueName: \"kubernetes.io/projected/2d8a22dd-465c-4327-8d76-782e5d289942-kube-api-access-dtchc\") pod \"control-plane-machine-set-operator-78cbb6b69f-wnxdh\" (UID: \"2d8a22dd-465c-4327-8d76-782e5d289942\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wnxdh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.078042 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-registry-tls\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.078078 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/3b1e309e-6542-43b9-95cc-3197be39a203-image-import-ca\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.078112 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/d3311546-a763-4f88-87c2-ea9dc6c5d023-trusted-ca\") pod \"ingress-operator-5b745b69d9-b28md\" (UID: \"d3311546-a763-4f88-87c2-ea9dc6c5d023\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b28md" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.078151 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/fe381376-cd51-4565-be0a-1fd8a77be7ac-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-6rtsw\" (UID: \"fe381376-cd51-4565-be0a-1fd8a77be7ac\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.078187 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.078224 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qt4wm\" (UniqueName: \"kubernetes.io/projected/b39bb8fb-c84a-48cc-aa65-b992c06a090b-kube-api-access-qt4wm\") pod \"collect-profiles-29424825-fsrlm\" (UID: \"b39bb8fb-c84a-48cc-aa65-b992c06a090b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424825-fsrlm" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.078263 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fab9c443-9e80-4943-bfa0-0902f4377230-config\") pod \"route-controller-manager-6576b87f9c-nx88d\" (UID: \"fab9c443-9e80-4943-bfa0-0902f4377230\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.078297 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1cd84235-0b8b-43a0-8d10-6324b5759eac-apiservice-cert\") pod \"packageserver-d55dfcdfc-z6m7g\" (UID: \"1cd84235-0b8b-43a0-8d10-6324b5759eac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-z6m7g" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.078331 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0190a0a5-2358-4044-b766-f164e0124dab-client-ca\") pod \"controller-manager-879f6c89f-zvhmh\" (UID: \"0190a0a5-2358-4044-b766-f164e0124dab\") " pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.079253 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1f36b124-c397-4935-82b6-191d83292d1b-service-ca-bundle\") pod \"router-default-5444994796-fbpg7\" (UID: \"1f36b124-c397-4935-82b6-191d83292d1b\") " pod="openshift-ingress/router-default-5444994796-fbpg7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.080008 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3b1e309e-6542-43b9-95cc-3197be39a203-node-pullsecrets\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " 
pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.085335 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/45c3a183-07ab-4339-92b3-97eac03e9601-serving-cert\") pod \"console-operator-58897d9998-4mlhz\" (UID: \"45c3a183-07ab-4339-92b3-97eac03e9601\") " pod="openshift-console-operator/console-operator-58897d9998-4mlhz" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.086194 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b1e309e-6542-43b9-95cc-3197be39a203-serving-cert\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.086732 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/d44ce400-a741-4232-897e-f9e50bc0f894-etcd-service-ca\") pod \"etcd-operator-b45778765-zrrsn\" (UID: \"d44ce400-a741-4232-897e-f9e50bc0f894\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrrsn" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.073712 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-ca-trust-extracted\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.090025 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3b1e309e-6542-43b9-95cc-3197be39a203-audit-dir\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.090170 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/3b1e309e-6542-43b9-95cc-3197be39a203-audit\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.090517 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/62552ab3-7cb2-4f75-8f3a-75d264a50f66-available-featuregates\") pod \"openshift-config-operator-7777fb866f-vzvmj\" (UID: \"62552ab3-7cb2-4f75-8f3a-75d264a50f66\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vzvmj" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.091748 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.092245 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0190a0a5-2358-4044-b766-f164e0124dab-client-ca\") pod \"controller-manager-879f6c89f-zvhmh\" (UID: 
\"0190a0a5-2358-4044-b766-f164e0124dab\") " pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.093148 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3b1e309e-6542-43b9-95cc-3197be39a203-etcd-serving-ca\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.093861 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5d35c7f-7d08-4c15-a193-867b0b8ea71e-config\") pod \"service-ca-operator-777779d784-nrqrs\" (UID: \"e5d35c7f-7d08-4c15-a193-867b0b8ea71e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nrqrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.094763 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fab9c443-9e80-4943-bfa0-0902f4377230-client-ca\") pod \"route-controller-manager-6576b87f9c-nx88d\" (UID: \"fab9c443-9e80-4943-bfa0-0902f4377230\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.094947 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/364d355b-96bc-4ce2-9734-6758414934fc-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-8tg54\" (UID: \"364d355b-96bc-4ce2-9734-6758414934fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8tg54" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.095513 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/1f36b124-c397-4935-82b6-191d83292d1b-stats-auth\") pod \"router-default-5444994796-fbpg7\" (UID: \"1f36b124-c397-4935-82b6-191d83292d1b\") " pod="openshift-ingress/router-default-5444994796-fbpg7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.095765 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/fe381376-cd51-4565-be0a-1fd8a77be7ac-etcd-client\") pod \"apiserver-7bbb656c7d-6rtsw\" (UID: \"fe381376-cd51-4565-be0a-1fd8a77be7ac\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.096268 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-installation-pull-secrets\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.097016 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fe381376-cd51-4565-be0a-1fd8a77be7ac-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-6rtsw\" (UID: \"fe381376-cd51-4565-be0a-1fd8a77be7ac\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.098331 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.099454 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/62802da2-70ad-46d2-bc51-b9bf3e0b6086-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-d8mkh\" (UID: \"62802da2-70ad-46d2-bc51-b9bf3e0b6086\") " pod="openshift-marketplace/marketplace-operator-79b997595-d8mkh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.099735 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3b1e309e-6542-43b9-95cc-3197be39a203-trusted-ca-bundle\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.100578 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/1f36b124-c397-4935-82b6-191d83292d1b-default-certificate\") pod \"router-default-5444994796-fbpg7\" (UID: \"1f36b124-c397-4935-82b6-191d83292d1b\") " pod="openshift-ingress/router-default-5444994796-fbpg7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.101637 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06f990d2-0043-4e1c-9a1d-34c70bc123d4-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-5k9nv\" (UID: \"06f990d2-0043-4e1c-9a1d-34c70bc123d4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5k9nv" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.103349 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45c3a183-07ab-4339-92b3-97eac03e9601-config\") pod \"console-operator-58897d9998-4mlhz\" (UID: \"45c3a183-07ab-4339-92b3-97eac03e9601\") " pod="openshift-console-operator/console-operator-58897d9998-4mlhz" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.103639 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d44ce400-a741-4232-897e-f9e50bc0f894-serving-cert\") pod \"etcd-operator-b45778765-zrrsn\" (UID: \"d44ce400-a741-4232-897e-f9e50bc0f894\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrrsn" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.104103 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/62802da2-70ad-46d2-bc51-b9bf3e0b6086-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-d8mkh\" (UID: \"62802da2-70ad-46d2-bc51-b9bf3e0b6086\") " pod="openshift-marketplace/marketplace-operator-79b997595-d8mkh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.104214 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/df97aa52-8dc9-46d3-932e-545b1c736c9b-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-wm8v7\" (UID: \"df97aa52-8dc9-46d3-932e-545b1c736c9b\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wm8v7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.105513 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.106298 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/8c2ab16f-cc2d-4319-ac87-974565b63c6e-machine-approver-tls\") pod \"machine-approver-56656f9798-9m9dx\" (UID: \"8c2ab16f-cc2d-4319-ac87-974565b63c6e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9m9dx" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.106432 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/06f990d2-0043-4e1c-9a1d-34c70bc123d4-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-5k9nv\" (UID: \"06f990d2-0043-4e1c-9a1d-34c70bc123d4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5k9nv" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.102535 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/74c4dba3-53de-449c-9360-9ec5d8a00b1d-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-2mg62\" (UID: \"74c4dba3-53de-449c-9360-9ec5d8a00b1d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2mg62" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.107697 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1f36b124-c397-4935-82b6-191d83292d1b-metrics-certs\") pod \"router-default-5444994796-fbpg7\" (UID: \"1f36b124-c397-4935-82b6-191d83292d1b\") " pod="openshift-ingress/router-default-5444994796-fbpg7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.108970 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.109636 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6dae87cb-e091-408e-9b9d-4d45e7797fc5-audit-policies\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.110504 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 
21:50:03.107456 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.111567 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b058f1ae-0331-46f7-a1e8-339dbf9a9405-config\") pod \"kube-apiserver-operator-766d6c64bb-7pmjk\" (UID: \"b058f1ae-0331-46f7-a1e8-339dbf9a9405\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7pmjk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.112185 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/d44ce400-a741-4232-897e-f9e50bc0f894-etcd-ca\") pod \"etcd-operator-b45778765-zrrsn\" (UID: \"d44ce400-a741-4232-897e-f9e50bc0f894\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrrsn" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.112562 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/df97aa52-8dc9-46d3-932e-545b1c736c9b-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-wm8v7\" (UID: \"df97aa52-8dc9-46d3-932e-545b1c736c9b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wm8v7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.113048 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8c2ab16f-cc2d-4319-ac87-974565b63c6e-auth-proxy-config\") pod \"machine-approver-56656f9798-9m9dx\" (UID: \"8c2ab16f-cc2d-4319-ac87-974565b63c6e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9m9dx" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.113200 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c04a25ee-01c0-4fed-b4c5-a9984606786a-config\") pod \"authentication-operator-69f744f599-9g9fs\" (UID: \"c04a25ee-01c0-4fed-b4c5-a9984606786a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9g9fs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.113229 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05f4ef31-1e96-4627-ab11-cc326d624062-config\") pod \"openshift-apiserver-operator-796bbdcf4f-9dkxr\" (UID: \"05f4ef31-1e96-4627-ab11-cc326d624062\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9dkxr" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.113295 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1cd84235-0b8b-43a0-8d10-6324b5759eac-webhook-cert\") pod \"packageserver-d55dfcdfc-z6m7g\" (UID: \"1cd84235-0b8b-43a0-8d10-6324b5759eac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-z6m7g" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.113665 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-registry-certificates\") 
pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.113840 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe381376-cd51-4565-be0a-1fd8a77be7ac-serving-cert\") pod \"apiserver-7bbb656c7d-6rtsw\" (UID: \"fe381376-cd51-4565-be0a-1fd8a77be7ac\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.114473 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d44ce400-a741-4232-897e-f9e50bc0f894-etcd-client\") pod \"etcd-operator-b45778765-zrrsn\" (UID: \"d44ce400-a741-4232-897e-f9e50bc0f894\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrrsn" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.114588 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b7758ba9-d0df-47cc-a703-d69e4e001adf-proxy-tls\") pod \"machine-config-controller-84d6567774-jzbdh\" (UID: \"b7758ba9-d0df-47cc-a703-d69e4e001adf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jzbdh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.115860 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fab9c443-9e80-4943-bfa0-0902f4377230-serving-cert\") pod \"route-controller-manager-6576b87f9c-nx88d\" (UID: \"fab9c443-9e80-4943-bfa0-0902f4377230\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.116949 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/fe381376-cd51-4565-be0a-1fd8a77be7ac-audit-policies\") pod \"apiserver-7bbb656c7d-6rtsw\" (UID: \"fe381376-cd51-4565-be0a-1fd8a77be7ac\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.118498 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/364d355b-96bc-4ce2-9734-6758414934fc-service-ca\") pod \"cluster-version-operator-5c965bbfc6-8tg54\" (UID: \"364d355b-96bc-4ce2-9734-6758414934fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8tg54" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.120255 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-trusted-ca\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.122318 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/45c3a183-07ab-4339-92b3-97eac03e9601-trusted-ca\") pod \"console-operator-58897d9998-4mlhz\" (UID: \"45c3a183-07ab-4339-92b3-97eac03e9601\") " pod="openshift-console-operator/console-operator-58897d9998-4mlhz" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.124120 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/0190a0a5-2358-4044-b766-f164e0124dab-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-zvhmh\" (UID: \"0190a0a5-2358-4044-b766-f164e0124dab\") " pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.124725 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/3b1e309e-6542-43b9-95cc-3197be39a203-image-import-ca\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.125497 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.125968 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c04a25ee-01c0-4fed-b4c5-a9984606786a-serving-cert\") pod \"authentication-operator-69f744f599-9g9fs\" (UID: \"c04a25ee-01c0-4fed-b4c5-a9984606786a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9g9fs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.126264 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3b1e309e-6542-43b9-95cc-3197be39a203-etcd-client\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.126655 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.126692 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/fe381376-cd51-4565-be0a-1fd8a77be7ac-encryption-config\") pod \"apiserver-7bbb656c7d-6rtsw\" (UID: \"fe381376-cd51-4565-be0a-1fd8a77be7ac\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.127356 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/05f4ef31-1e96-4627-ab11-cc326d624062-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-9dkxr\" (UID: \"05f4ef31-1e96-4627-ab11-cc326d624062\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9dkxr" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.128103 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/fe381376-cd51-4565-be0a-1fd8a77be7ac-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-6rtsw\" (UID: \"fe381376-cd51-4565-be0a-1fd8a77be7ac\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:03 crc 
kubenswrapper[4956]: I1211 21:50:03.128213 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d3311546-a763-4f88-87c2-ea9dc6c5d023-trusted-ca\") pod \"ingress-operator-5b745b69d9-b28md\" (UID: \"d3311546-a763-4f88-87c2-ea9dc6c5d023\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b28md" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.128441 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.129186 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/99b3c2ef-0c86-427c-9c97-f4a9221b69b1-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-dkst5\" (UID: \"99b3c2ef-0c86-427c-9c97-f4a9221b69b1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dkst5" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.130071 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c04a25ee-01c0-4fed-b4c5-a9984606786a-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-9g9fs\" (UID: \"c04a25ee-01c0-4fed-b4c5-a9984606786a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9g9fs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.130346 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62552ab3-7cb2-4f75-8f3a-75d264a50f66-serving-cert\") pod \"openshift-config-operator-7777fb866f-vzvmj\" (UID: \"62552ab3-7cb2-4f75-8f3a-75d264a50f66\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vzvmj" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.131163 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/99b3c2ef-0c86-427c-9c97-f4a9221b69b1-images\") pod \"machine-api-operator-5694c8668f-dkst5\" (UID: \"99b3c2ef-0c86-427c-9c97-f4a9221b69b1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dkst5" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.131370 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3b1e309e-6542-43b9-95cc-3197be39a203-encryption-config\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.131381 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrs5w\" (UniqueName: \"kubernetes.io/projected/fe381376-cd51-4565-be0a-1fd8a77be7ac-kube-api-access-qrs5w\") pod \"apiserver-7bbb656c7d-6rtsw\" (UID: \"fe381376-cd51-4565-be0a-1fd8a77be7ac\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.131435 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.131797 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b7758ba9-d0df-47cc-a703-d69e4e001adf-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-jzbdh\" (UID: \"b7758ba9-d0df-47cc-a703-d69e4e001adf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jzbdh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.132380 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d3311546-a763-4f88-87c2-ea9dc6c5d023-metrics-tls\") pod \"ingress-operator-5b745b69d9-b28md\" (UID: \"d3311546-a763-4f88-87c2-ea9dc6c5d023\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b28md" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.132391 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0190a0a5-2358-4044-b766-f164e0124dab-serving-cert\") pod \"controller-manager-879f6c89f-zvhmh\" (UID: \"0190a0a5-2358-4044-b766-f164e0124dab\") " pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.132557 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/364d355b-96bc-4ce2-9734-6758414934fc-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-8tg54\" (UID: \"364d355b-96bc-4ce2-9734-6758414934fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8tg54" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.132614 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjfnv\" (UniqueName: \"kubernetes.io/projected/c04a25ee-01c0-4fed-b4c5-a9984606786a-kube-api-access-sjfnv\") pod \"authentication-operator-69f744f599-9g9fs\" (UID: \"c04a25ee-01c0-4fed-b4c5-a9984606786a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9g9fs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.133038 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/78d9268c-ae46-4117-8674-2a7d107831bd-metrics-tls\") pod \"dns-operator-744455d44c-hsjmq\" (UID: \"78d9268c-ae46-4117-8674-2a7d107831bd\") " pod="openshift-dns-operator/dns-operator-744455d44c-hsjmq" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.133172 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5d35c7f-7d08-4c15-a193-867b0b8ea71e-serving-cert\") pod \"service-ca-operator-777779d784-nrqrs\" (UID: \"e5d35c7f-7d08-4c15-a193-867b0b8ea71e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nrqrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.133303 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2ca85300-63e6-412c-917b-df0c8696dfda-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-4lmpq\" (UID: \"2ca85300-63e6-412c-917b-df0c8696dfda\") " 
pod="openshift-multus/multus-admission-controller-857f4d67dd-4lmpq" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.133338 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.133467 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-registry-tls\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.134126 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fab9c443-9e80-4943-bfa0-0902f4377230-config\") pod \"route-controller-manager-6576b87f9c-nx88d\" (UID: \"fab9c443-9e80-4943-bfa0-0902f4377230\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.135314 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b058f1ae-0331-46f7-a1e8-339dbf9a9405-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-7pmjk\" (UID: \"b058f1ae-0331-46f7-a1e8-339dbf9a9405\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7pmjk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.135529 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1cd84235-0b8b-43a0-8d10-6324b5759eac-apiservice-cert\") pod \"packageserver-d55dfcdfc-z6m7g\" (UID: \"1cd84235-0b8b-43a0-8d10-6324b5759eac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-z6m7g" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.155905 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-488sb\" (UniqueName: \"kubernetes.io/projected/8c2ab16f-cc2d-4319-ac87-974565b63c6e-kube-api-access-488sb\") pod \"machine-approver-56656f9798-9m9dx\" (UID: \"8c2ab16f-cc2d-4319-ac87-974565b63c6e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9m9dx" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.172183 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/df97aa52-8dc9-46d3-932e-545b1c736c9b-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-wm8v7\" (UID: \"df97aa52-8dc9-46d3-932e-545b1c736c9b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wm8v7" Dec 11 21:50:03 crc kubenswrapper[4956]: E1211 21:50:03.182757 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:03.682737096 +0000 UTC m=+96.127115246 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.182800 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.183395 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2q6l6\" (UniqueName: \"kubernetes.io/projected/0c3066a4-fecf-4608-be24-c0534bd263cc-kube-api-access-2q6l6\") pod \"service-ca-9c57cc56f-j8hd5\" (UID: \"0c3066a4-fecf-4608-be24-c0534bd263cc\") " pod="openshift-service-ca/service-ca-9c57cc56f-j8hd5" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.183577 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c53073d9-6cbf-4e29-95cb-88254007d7d4-config-volume\") pod \"dns-default-6nc5z\" (UID: \"c53073d9-6cbf-4e29-95cb-88254007d7d4\") " pod="openshift-dns/dns-default-6nc5z" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.183663 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/998b11ed-322d-49b3-9a3a-79474037d6ea-service-ca\") pod \"console-f9d7485db-jkrgw\" (UID: \"998b11ed-322d-49b3-9a3a-79474037d6ea\") " pod="openshift-console/console-f9d7485db-jkrgw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.183750 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b39bb8fb-c84a-48cc-aa65-b992c06a090b-secret-volume\") pod \"collect-profiles-29424825-fsrlm\" (UID: \"b39bb8fb-c84a-48cc-aa65-b992c06a090b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424825-fsrlm" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.183809 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/37ac4d54-fa49-4866-96c5-fcc954e9d3e6-profile-collector-cert\") pod \"olm-operator-6b444d44fb-9b85x\" (UID: \"37ac4d54-fa49-4866-96c5-fcc954e9d3e6\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9b85x" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.183846 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/0c3066a4-fecf-4608-be24-c0534bd263cc-signing-key\") pod \"service-ca-9c57cc56f-j8hd5\" (UID: \"0c3066a4-fecf-4608-be24-c0534bd263cc\") " pod="openshift-service-ca/service-ca-9c57cc56f-j8hd5" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.183891 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cf15492b-35b0-42ad-a13d-540ccaa7dc23-auth-proxy-config\") pod \"machine-config-operator-74547568cd-56fs7\" 
(UID: \"cf15492b-35b0-42ad-a13d-540ccaa7dc23\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-56fs7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.183936 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/6dfd68e7-13f2-49a6-9304-a56d7fb6b2d3-certs\") pod \"machine-config-server-gkxnv\" (UID: \"6dfd68e7-13f2-49a6-9304-a56d7fb6b2d3\") " pod="openshift-machine-config-operator/machine-config-server-gkxnv" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.183974 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/2d8a22dd-465c-4327-8d76-782e5d289942-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-wnxdh\" (UID: \"2d8a22dd-465c-4327-8d76-782e5d289942\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wnxdh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184022 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9z76\" (UniqueName: \"kubernetes.io/projected/ce0f76b3-4ba2-4981-a888-8f659c504f6c-kube-api-access-j9z76\") pod \"catalog-operator-68c6474976-lrprk\" (UID: \"ce0f76b3-4ba2-4981-a888-8f659c504f6c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lrprk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184068 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d63c8a9-8aed-4a38-a43c-ea5c95e6a59f-config\") pod \"kube-controller-manager-operator-78b949d7b-6pn79\" (UID: \"7d63c8a9-8aed-4a38-a43c-ea5c95e6a59f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6pn79" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184144 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wllvv\" (UniqueName: \"kubernetes.io/projected/37ac4d54-fa49-4866-96c5-fcc954e9d3e6-kube-api-access-wllvv\") pod \"olm-operator-6b444d44fb-9b85x\" (UID: \"37ac4d54-fa49-4866-96c5-fcc954e9d3e6\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9b85x" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184196 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/998b11ed-322d-49b3-9a3a-79474037d6ea-console-config\") pod \"console-f9d7485db-jkrgw\" (UID: \"998b11ed-322d-49b3-9a3a-79474037d6ea\") " pod="openshift-console/console-f9d7485db-jkrgw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184262 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef684a3a-493f-4116-ae57-a0e732765982-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-6f6vb\" (UID: \"ef684a3a-493f-4116-ae57-a0e732765982\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6f6vb" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184337 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c53073d9-6cbf-4e29-95cb-88254007d7d4-config-volume\") pod \"dns-default-6nc5z\" (UID: \"c53073d9-6cbf-4e29-95cb-88254007d7d4\") " pod="openshift-dns/dns-default-6nc5z" Dec 11 
21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184343 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lg4p\" (UniqueName: \"kubernetes.io/projected/c53073d9-6cbf-4e29-95cb-88254007d7d4-kube-api-access-6lg4p\") pod \"dns-default-6nc5z\" (UID: \"c53073d9-6cbf-4e29-95cb-88254007d7d4\") " pod="openshift-dns/dns-default-6nc5z" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184389 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b39bb8fb-c84a-48cc-aa65-b992c06a090b-config-volume\") pod \"collect-profiles-29424825-fsrlm\" (UID: \"b39bb8fb-c84a-48cc-aa65-b992c06a090b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424825-fsrlm" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184418 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/998b11ed-322d-49b3-9a3a-79474037d6ea-console-serving-cert\") pod \"console-f9d7485db-jkrgw\" (UID: \"998b11ed-322d-49b3-9a3a-79474037d6ea\") " pod="openshift-console/console-f9d7485db-jkrgw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184437 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9222bc8f-bd0e-40f3-be61-07b4f951adae-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-n77qx\" (UID: \"9222bc8f-bd0e-40f3-be61-07b4f951adae\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n77qx" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184453 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9222bc8f-bd0e-40f3-be61-07b4f951adae-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-n77qx\" (UID: \"9222bc8f-bd0e-40f3-be61-07b4f951adae\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n77qx" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184492 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ef684a3a-493f-4116-ae57-a0e732765982-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-6f6vb\" (UID: \"ef684a3a-493f-4116-ae57-a0e732765982\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6f6vb" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184514 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nl2dd\" (UniqueName: \"kubernetes.io/projected/20336def-d6ab-4203-8957-629a61fec0a7-kube-api-access-nl2dd\") pod \"package-server-manager-789f6589d5-8r689\" (UID: \"20336def-d6ab-4203-8957-629a61fec0a7\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8r689" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184538 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7nmv\" (UniqueName: \"kubernetes.io/projected/2d03e7cc-e66c-4be7-a167-6e8619011299-kube-api-access-v7nmv\") pod \"migrator-59844c95c7-7fvb4\" (UID: \"2d03e7cc-e66c-4be7-a167-6e8619011299\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7fvb4" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184562 4956 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cf15492b-35b0-42ad-a13d-540ccaa7dc23-images\") pod \"machine-config-operator-74547568cd-56fs7\" (UID: \"cf15492b-35b0-42ad-a13d-540ccaa7dc23\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-56fs7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184578 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/37ac4d54-fa49-4866-96c5-fcc954e9d3e6-srv-cert\") pod \"olm-operator-6b444d44fb-9b85x\" (UID: \"37ac4d54-fa49-4866-96c5-fcc954e9d3e6\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9b85x" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184597 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/0e804c82-ec6a-4d08-bd18-d50942c0d985-socket-dir\") pod \"csi-hostpathplugin-nh95j\" (UID: \"0e804c82-ec6a-4d08-bd18-d50942c0d985\") " pod="hostpath-provisioner/csi-hostpathplugin-nh95j" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184621 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/0c3066a4-fecf-4608-be24-c0534bd263cc-signing-cabundle\") pod \"service-ca-9c57cc56f-j8hd5\" (UID: \"0c3066a4-fecf-4608-be24-c0534bd263cc\") " pod="openshift-service-ca/service-ca-9c57cc56f-j8hd5" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184684 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/998b11ed-322d-49b3-9a3a-79474037d6ea-oauth-serving-cert\") pod \"console-f9d7485db-jkrgw\" (UID: \"998b11ed-322d-49b3-9a3a-79474037d6ea\") " pod="openshift-console/console-f9d7485db-jkrgw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184709 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e2f7854d-11cc-4a65-977c-8a1570116842-cert\") pod \"ingress-canary-7phqq\" (UID: \"e2f7854d-11cc-4a65-977c-8a1570116842\") " pod="openshift-ingress-canary/ingress-canary-7phqq" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184738 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/0e804c82-ec6a-4d08-bd18-d50942c0d985-csi-data-dir\") pod \"csi-hostpathplugin-nh95j\" (UID: \"0e804c82-ec6a-4d08-bd18-d50942c0d985\") " pod="hostpath-provisioner/csi-hostpathplugin-nh95j" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184754 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvnsf\" (UniqueName: \"kubernetes.io/projected/998b11ed-322d-49b3-9a3a-79474037d6ea-kube-api-access-fvnsf\") pod \"console-f9d7485db-jkrgw\" (UID: \"998b11ed-322d-49b3-9a3a-79474037d6ea\") " pod="openshift-console/console-f9d7485db-jkrgw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184810 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtchc\" (UniqueName: \"kubernetes.io/projected/2d8a22dd-465c-4327-8d76-782e5d289942-kube-api-access-dtchc\") pod \"control-plane-machine-set-operator-78cbb6b69f-wnxdh\" (UID: \"2d8a22dd-465c-4327-8d76-782e5d289942\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wnxdh" Dec 11 21:50:03 crc 
kubenswrapper[4956]: I1211 21:50:03.184835 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qt4wm\" (UniqueName: \"kubernetes.io/projected/b39bb8fb-c84a-48cc-aa65-b992c06a090b-kube-api-access-qt4wm\") pod \"collect-profiles-29424825-fsrlm\" (UID: \"b39bb8fb-c84a-48cc-aa65-b992c06a090b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424825-fsrlm" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184851 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfs6w\" (UniqueName: \"kubernetes.io/projected/0e804c82-ec6a-4d08-bd18-d50942c0d985-kube-api-access-kfs6w\") pod \"csi-hostpathplugin-nh95j\" (UID: \"0e804c82-ec6a-4d08-bd18-d50942c0d985\") " pod="hostpath-provisioner/csi-hostpathplugin-nh95j" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184869 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7d63c8a9-8aed-4a38-a43c-ea5c95e6a59f-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-6pn79\" (UID: \"7d63c8a9-8aed-4a38-a43c-ea5c95e6a59f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6pn79" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184886 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cf15492b-35b0-42ad-a13d-540ccaa7dc23-proxy-tls\") pod \"machine-config-operator-74547568cd-56fs7\" (UID: \"cf15492b-35b0-42ad-a13d-540ccaa7dc23\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-56fs7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184901 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/998b11ed-322d-49b3-9a3a-79474037d6ea-trusted-ca-bundle\") pod \"console-f9d7485db-jkrgw\" (UID: \"998b11ed-322d-49b3-9a3a-79474037d6ea\") " pod="openshift-console/console-f9d7485db-jkrgw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184930 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spnwm\" (UniqueName: \"kubernetes.io/projected/e2f7854d-11cc-4a65-977c-8a1570116842-kube-api-access-spnwm\") pod \"ingress-canary-7phqq\" (UID: \"e2f7854d-11cc-4a65-977c-8a1570116842\") " pod="openshift-ingress-canary/ingress-canary-7phqq" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184947 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/6dfd68e7-13f2-49a6-9304-a56d7fb6b2d3-node-bootstrap-token\") pod \"machine-config-server-gkxnv\" (UID: \"6dfd68e7-13f2-49a6-9304-a56d7fb6b2d3\") " pod="openshift-machine-config-operator/machine-config-server-gkxnv" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184966 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c53073d9-6cbf-4e29-95cb-88254007d7d4-metrics-tls\") pod \"dns-default-6nc5z\" (UID: \"c53073d9-6cbf-4e29-95cb-88254007d7d4\") " pod="openshift-dns/dns-default-6nc5z" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.184981 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/0e804c82-ec6a-4d08-bd18-d50942c0d985-mountpoint-dir\") pod 
\"csi-hostpathplugin-nh95j\" (UID: \"0e804c82-ec6a-4d08-bd18-d50942c0d985\") " pod="hostpath-provisioner/csi-hostpathplugin-nh95j" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.185007 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/20336def-d6ab-4203-8957-629a61fec0a7-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-8r689\" (UID: \"20336def-d6ab-4203-8957-629a61fec0a7\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8r689" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.185023 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d63c8a9-8aed-4a38-a43c-ea5c95e6a59f-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-6pn79\" (UID: \"7d63c8a9-8aed-4a38-a43c-ea5c95e6a59f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6pn79" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.185041 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnn9w\" (UniqueName: \"kubernetes.io/projected/ef684a3a-493f-4116-ae57-a0e732765982-kube-api-access-mnn9w\") pod \"kube-storage-version-migrator-operator-b67b599dd-6f6vb\" (UID: \"ef684a3a-493f-4116-ae57-a0e732765982\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6f6vb" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.185059 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ce0f76b3-4ba2-4981-a888-8f659c504f6c-profile-collector-cert\") pod \"catalog-operator-68c6474976-lrprk\" (UID: \"ce0f76b3-4ba2-4981-a888-8f659c504f6c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lrprk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.185078 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/998b11ed-322d-49b3-9a3a-79474037d6ea-console-oauth-config\") pod \"console-f9d7485db-jkrgw\" (UID: \"998b11ed-322d-49b3-9a3a-79474037d6ea\") " pod="openshift-console/console-f9d7485db-jkrgw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.185093 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9222bc8f-bd0e-40f3-be61-07b4f951adae-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-n77qx\" (UID: \"9222bc8f-bd0e-40f3-be61-07b4f951adae\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n77qx" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.185119 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbrht\" (UniqueName: \"kubernetes.io/projected/cf15492b-35b0-42ad-a13d-540ccaa7dc23-kube-api-access-sbrht\") pod \"machine-config-operator-74547568cd-56fs7\" (UID: \"cf15492b-35b0-42ad-a13d-540ccaa7dc23\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-56fs7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.185112 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/998b11ed-322d-49b3-9a3a-79474037d6ea-service-ca\") pod 
\"console-f9d7485db-jkrgw\" (UID: \"998b11ed-322d-49b3-9a3a-79474037d6ea\") " pod="openshift-console/console-f9d7485db-jkrgw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.185136 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/0e804c82-ec6a-4d08-bd18-d50942c0d985-registration-dir\") pod \"csi-hostpathplugin-nh95j\" (UID: \"0e804c82-ec6a-4d08-bd18-d50942c0d985\") " pod="hostpath-provisioner/csi-hostpathplugin-nh95j" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.185170 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ce0f76b3-4ba2-4981-a888-8f659c504f6c-srv-cert\") pod \"catalog-operator-68c6474976-lrprk\" (UID: \"ce0f76b3-4ba2-4981-a888-8f659c504f6c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lrprk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.185189 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/0e804c82-ec6a-4d08-bd18-d50942c0d985-plugins-dir\") pod \"csi-hostpathplugin-nh95j\" (UID: \"0e804c82-ec6a-4d08-bd18-d50942c0d985\") " pod="hostpath-provisioner/csi-hostpathplugin-nh95j" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.185210 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.185228 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgwpf\" (UniqueName: \"kubernetes.io/projected/6dfd68e7-13f2-49a6-9304-a56d7fb6b2d3-kube-api-access-hgwpf\") pod \"machine-config-server-gkxnv\" (UID: \"6dfd68e7-13f2-49a6-9304-a56d7fb6b2d3\") " pod="openshift-machine-config-operator/machine-config-server-gkxnv" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.186497 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b39bb8fb-c84a-48cc-aa65-b992c06a090b-config-volume\") pod \"collect-profiles-29424825-fsrlm\" (UID: \"b39bb8fb-c84a-48cc-aa65-b992c06a090b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424825-fsrlm" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.188414 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b39bb8fb-c84a-48cc-aa65-b992c06a090b-secret-volume\") pod \"collect-profiles-29424825-fsrlm\" (UID: \"b39bb8fb-c84a-48cc-aa65-b992c06a090b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424825-fsrlm" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.188467 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/2d8a22dd-465c-4327-8d76-782e5d289942-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-wnxdh\" (UID: \"2d8a22dd-465c-4327-8d76-782e5d289942\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wnxdh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 
21:50:03.189625 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d63c8a9-8aed-4a38-a43c-ea5c95e6a59f-config\") pod \"kube-controller-manager-operator-78b949d7b-6pn79\" (UID: \"7d63c8a9-8aed-4a38-a43c-ea5c95e6a59f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6pn79" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.190105 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cf15492b-35b0-42ad-a13d-540ccaa7dc23-proxy-tls\") pod \"machine-config-operator-74547568cd-56fs7\" (UID: \"cf15492b-35b0-42ad-a13d-540ccaa7dc23\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-56fs7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.190852 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/998b11ed-322d-49b3-9a3a-79474037d6ea-trusted-ca-bundle\") pod \"console-f9d7485db-jkrgw\" (UID: \"998b11ed-322d-49b3-9a3a-79474037d6ea\") " pod="openshift-console/console-f9d7485db-jkrgw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.190900 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/998b11ed-322d-49b3-9a3a-79474037d6ea-console-config\") pod \"console-f9d7485db-jkrgw\" (UID: \"998b11ed-322d-49b3-9a3a-79474037d6ea\") " pod="openshift-console/console-f9d7485db-jkrgw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.191618 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/998b11ed-322d-49b3-9a3a-79474037d6ea-console-serving-cert\") pod \"console-f9d7485db-jkrgw\" (UID: \"998b11ed-322d-49b3-9a3a-79474037d6ea\") " pod="openshift-console/console-f9d7485db-jkrgw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.191913 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef684a3a-493f-4116-ae57-a0e732765982-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-6f6vb\" (UID: \"ef684a3a-493f-4116-ae57-a0e732765982\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6f6vb" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.192762 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/37ac4d54-fa49-4866-96c5-fcc954e9d3e6-profile-collector-cert\") pod \"olm-operator-6b444d44fb-9b85x\" (UID: \"37ac4d54-fa49-4866-96c5-fcc954e9d3e6\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9b85x" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.194155 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/6dfd68e7-13f2-49a6-9304-a56d7fb6b2d3-node-bootstrap-token\") pod \"machine-config-server-gkxnv\" (UID: \"6dfd68e7-13f2-49a6-9304-a56d7fb6b2d3\") " pod="openshift-machine-config-operator/machine-config-server-gkxnv" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.194830 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9222bc8f-bd0e-40f3-be61-07b4f951adae-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-n77qx\" (UID: 
\"9222bc8f-bd0e-40f3-be61-07b4f951adae\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n77qx" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.195347 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9222bc8f-bd0e-40f3-be61-07b4f951adae-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-n77qx\" (UID: \"9222bc8f-bd0e-40f3-be61-07b4f951adae\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n77qx" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.195923 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/0c3066a4-fecf-4608-be24-c0534bd263cc-signing-cabundle\") pod \"service-ca-9c57cc56f-j8hd5\" (UID: \"0c3066a4-fecf-4608-be24-c0534bd263cc\") " pod="openshift-service-ca/service-ca-9c57cc56f-j8hd5" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.196330 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c53073d9-6cbf-4e29-95cb-88254007d7d4-metrics-tls\") pod \"dns-default-6nc5z\" (UID: \"c53073d9-6cbf-4e29-95cb-88254007d7d4\") " pod="openshift-dns/dns-default-6nc5z" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.199183 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-bound-sa-token\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.199412 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cf15492b-35b0-42ad-a13d-540ccaa7dc23-auth-proxy-config\") pod \"machine-config-operator-74547568cd-56fs7\" (UID: \"cf15492b-35b0-42ad-a13d-540ccaa7dc23\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-56fs7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.199545 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/37ac4d54-fa49-4866-96c5-fcc954e9d3e6-srv-cert\") pod \"olm-operator-6b444d44fb-9b85x\" (UID: \"37ac4d54-fa49-4866-96c5-fcc954e9d3e6\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9b85x" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.200155 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/0e804c82-ec6a-4d08-bd18-d50942c0d985-csi-data-dir\") pod \"csi-hostpathplugin-nh95j\" (UID: \"0e804c82-ec6a-4d08-bd18-d50942c0d985\") " pod="hostpath-provisioner/csi-hostpathplugin-nh95j" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.200291 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/0e804c82-ec6a-4d08-bd18-d50942c0d985-mountpoint-dir\") pod \"csi-hostpathplugin-nh95j\" (UID: \"0e804c82-ec6a-4d08-bd18-d50942c0d985\") " pod="hostpath-provisioner/csi-hostpathplugin-nh95j" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.200437 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/998b11ed-322d-49b3-9a3a-79474037d6ea-oauth-serving-cert\") pod \"console-f9d7485db-jkrgw\" (UID: \"998b11ed-322d-49b3-9a3a-79474037d6ea\") " pod="openshift-console/console-f9d7485db-jkrgw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.200762 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cf15492b-35b0-42ad-a13d-540ccaa7dc23-images\") pod \"machine-config-operator-74547568cd-56fs7\" (UID: \"cf15492b-35b0-42ad-a13d-540ccaa7dc23\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-56fs7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.201252 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/0e804c82-ec6a-4d08-bd18-d50942c0d985-registration-dir\") pod \"csi-hostpathplugin-nh95j\" (UID: \"0e804c82-ec6a-4d08-bd18-d50942c0d985\") " pod="hostpath-provisioner/csi-hostpathplugin-nh95j" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.201294 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/0e804c82-ec6a-4d08-bd18-d50942c0d985-socket-dir\") pod \"csi-hostpathplugin-nh95j\" (UID: \"0e804c82-ec6a-4d08-bd18-d50942c0d985\") " pod="hostpath-provisioner/csi-hostpathplugin-nh95j" Dec 11 21:50:03 crc kubenswrapper[4956]: E1211 21:50:03.201465 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:03.701453022 +0000 UTC m=+96.145831172 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.209481 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/998b11ed-322d-49b3-9a3a-79474037d6ea-console-oauth-config\") pod \"console-f9d7485db-jkrgw\" (UID: \"998b11ed-322d-49b3-9a3a-79474037d6ea\") " pod="openshift-console/console-f9d7485db-jkrgw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.209605 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ef684a3a-493f-4116-ae57-a0e732765982-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-6f6vb\" (UID: \"ef684a3a-493f-4116-ae57-a0e732765982\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6f6vb" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.210645 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ce0f76b3-4ba2-4981-a888-8f659c504f6c-profile-collector-cert\") pod \"catalog-operator-68c6474976-lrprk\" (UID: \"ce0f76b3-4ba2-4981-a888-8f659c504f6c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lrprk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.211299 
4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e2f7854d-11cc-4a65-977c-8a1570116842-cert\") pod \"ingress-canary-7phqq\" (UID: \"e2f7854d-11cc-4a65-977c-8a1570116842\") " pod="openshift-ingress-canary/ingress-canary-7phqq" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.211570 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/0c3066a4-fecf-4608-be24-c0534bd263cc-signing-key\") pod \"service-ca-9c57cc56f-j8hd5\" (UID: \"0c3066a4-fecf-4608-be24-c0534bd263cc\") " pod="openshift-service-ca/service-ca-9c57cc56f-j8hd5" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.212300 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/0e804c82-ec6a-4d08-bd18-d50942c0d985-plugins-dir\") pod \"csi-hostpathplugin-nh95j\" (UID: \"0e804c82-ec6a-4d08-bd18-d50942c0d985\") " pod="hostpath-provisioner/csi-hostpathplugin-nh95j" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.210436 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ce0f76b3-4ba2-4981-a888-8f659c504f6c-srv-cert\") pod \"catalog-operator-68c6474976-lrprk\" (UID: \"ce0f76b3-4ba2-4981-a888-8f659c504f6c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lrprk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.216377 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d63c8a9-8aed-4a38-a43c-ea5c95e6a59f-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-6pn79\" (UID: \"7d63c8a9-8aed-4a38-a43c-ea5c95e6a59f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6pn79" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.216711 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/20336def-d6ab-4203-8957-629a61fec0a7-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-8r689\" (UID: \"20336def-d6ab-4203-8957-629a61fec0a7\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8r689" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.216890 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/6dfd68e7-13f2-49a6-9304-a56d7fb6b2d3-certs\") pod \"machine-config-server-gkxnv\" (UID: \"6dfd68e7-13f2-49a6-9304-a56d7fb6b2d3\") " pod="openshift-machine-config-operator/machine-config-server-gkxnv" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.219195 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jxlq\" (UniqueName: \"kubernetes.io/projected/d44ce400-a741-4232-897e-f9e50bc0f894-kube-api-access-2jxlq\") pod \"etcd-operator-b45778765-zrrsn\" (UID: \"d44ce400-a741-4232-897e-f9e50bc0f894\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zrrsn" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.227817 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7t8j5\" (UniqueName: \"kubernetes.io/projected/df97aa52-8dc9-46d3-932e-545b1c736c9b-kube-api-access-7t8j5\") pod \"cluster-image-registry-operator-dc59b4c8b-wm8v7\" (UID: \"df97aa52-8dc9-46d3-932e-545b1c736c9b\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wm8v7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.251451 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxqr9\" (UniqueName: \"kubernetes.io/projected/d3311546-a763-4f88-87c2-ea9dc6c5d023-kube-api-access-mxqr9\") pod \"ingress-operator-5b745b69d9-b28md\" (UID: \"d3311546-a763-4f88-87c2-ea9dc6c5d023\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b28md" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.273534 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7lmn\" (UniqueName: \"kubernetes.io/projected/05f4ef31-1e96-4627-ab11-cc326d624062-kube-api-access-h7lmn\") pod \"openshift-apiserver-operator-796bbdcf4f-9dkxr\" (UID: \"05f4ef31-1e96-4627-ab11-cc326d624062\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9dkxr" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.286006 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:03 crc kubenswrapper[4956]: E1211 21:50:03.286143 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:03.786111995 +0000 UTC m=+96.230490165 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.286449 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:03 crc kubenswrapper[4956]: E1211 21:50:03.286833 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:03.786819813 +0000 UTC m=+96.231198023 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.292963 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5585p\" (UniqueName: \"kubernetes.io/projected/fab9c443-9e80-4943-bfa0-0902f4377230-kube-api-access-5585p\") pod \"route-controller-manager-6576b87f9c-nx88d\" (UID: \"fab9c443-9e80-4943-bfa0-0902f4377230\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.297992 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.307122 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wm8v7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.309534 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bk7vr\" (UniqueName: \"kubernetes.io/projected/45c3a183-07ab-4339-92b3-97eac03e9601-kube-api-access-bk7vr\") pod \"console-operator-58897d9998-4mlhz\" (UID: \"45c3a183-07ab-4339-92b3-97eac03e9601\") " pod="openshift-console-operator/console-operator-58897d9998-4mlhz" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.333089 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9dkxr" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.334199 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7n72g\" (UniqueName: \"kubernetes.io/projected/b7758ba9-d0df-47cc-a703-d69e4e001adf-kube-api-access-7n72g\") pod \"machine-config-controller-84d6567774-jzbdh\" (UID: \"b7758ba9-d0df-47cc-a703-d69e4e001adf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jzbdh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.340634 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jzbdh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.346152 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-9g9fs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.358279 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2fhx\" (UniqueName: \"kubernetes.io/projected/74c4dba3-53de-449c-9360-9ec5d8a00b1d-kube-api-access-d2fhx\") pod \"cluster-samples-operator-665b6dd947-2mg62\" (UID: \"74c4dba3-53de-449c-9360-9ec5d8a00b1d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2mg62" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.413471 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:03 crc kubenswrapper[4956]: E1211 21:50:03.413798 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:03.913754006 +0000 UTC m=+96.358132166 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.413334 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.414278 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:03 crc kubenswrapper[4956]: E1211 21:50:03.414691 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:03.91467666 +0000 UTC m=+96.359054820 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.416236 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b058f1ae-0331-46f7-a1e8-339dbf9a9405-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-7pmjk\" (UID: \"b058f1ae-0331-46f7-a1e8-339dbf9a9405\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7pmjk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.424086 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7pmjk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.425119 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9m9dx" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.435690 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwhrz\" (UniqueName: \"kubernetes.io/projected/2ca85300-63e6-412c-917b-df0c8696dfda-kube-api-access-hwhrz\") pod \"multus-admission-controller-857f4d67dd-4lmpq\" (UID: \"2ca85300-63e6-412c-917b-df0c8696dfda\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-4lmpq" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.436645 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjdzt\" (UniqueName: \"kubernetes.io/projected/1f36b124-c397-4935-82b6-191d83292d1b-kube-api-access-zjdzt\") pod \"router-default-5444994796-fbpg7\" (UID: \"1f36b124-c397-4935-82b6-191d83292d1b\") " pod="openshift-ingress/router-default-5444994796-fbpg7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.437375 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vcns7\" (UniqueName: \"kubernetes.io/projected/0190a0a5-2358-4044-b766-f164e0124dab-kube-api-access-vcns7\") pod \"controller-manager-879f6c89f-zvhmh\" (UID: \"0190a0a5-2358-4044-b766-f164e0124dab\") " pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.439153 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-zrrsn" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.456668 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xh2gz\" (UniqueName: \"kubernetes.io/projected/3b1e309e-6542-43b9-95cc-3197be39a203-kube-api-access-xh2gz\") pod \"apiserver-76f77b778f-f4wrs\" (UID: \"3b1e309e-6542-43b9-95cc-3197be39a203\") " pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.474406 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhksf\" (UniqueName: \"kubernetes.io/projected/99b3c2ef-0c86-427c-9c97-f4a9221b69b1-kube-api-access-dhksf\") pod \"machine-api-operator-5694c8668f-dkst5\" (UID: \"99b3c2ef-0c86-427c-9c97-f4a9221b69b1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dkst5" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.541198 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.541199 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-4mlhz" Dec 11 21:50:03 crc kubenswrapper[4956]: E1211 21:50:03.541466 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:04.041440988 +0000 UTC m=+96.485819138 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.541650 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:03 crc kubenswrapper[4956]: E1211 21:50:03.542072 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:04.042057454 +0000 UTC m=+96.486435604 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.608063 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.624799 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2mg62" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.628686 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-dkst5" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.642675 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:03 crc kubenswrapper[4956]: E1211 21:50:03.643141 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:04.143124762 +0000 UTC m=+96.587502912 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.661619 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.690851 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-fbpg7" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.718570 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b54m7\" (UniqueName: \"kubernetes.io/projected/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-kube-api-access-b54m7\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.719311 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67nk8\" (UniqueName: \"kubernetes.io/projected/62552ab3-7cb2-4f75-8f3a-75d264a50f66-kube-api-access-67nk8\") pod \"openshift-config-operator-7777fb866f-vzvmj\" (UID: \"62552ab3-7cb2-4f75-8f3a-75d264a50f66\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vzvmj" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.735640 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sw7z4\" (UniqueName: \"kubernetes.io/projected/62802da2-70ad-46d2-bc51-b9bf3e0b6086-kube-api-access-sw7z4\") pod \"marketplace-operator-79b997595-d8mkh\" (UID: \"62802da2-70ad-46d2-bc51-b9bf3e0b6086\") " pod="openshift-marketplace/marketplace-operator-79b997595-d8mkh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.736081 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ng9w\" (UniqueName: \"kubernetes.io/projected/e5d35c7f-7d08-4c15-a193-867b0b8ea71e-kube-api-access-9ng9w\") pod \"service-ca-operator-777779d784-nrqrs\" (UID: \"e5d35c7f-7d08-4c15-a193-867b0b8ea71e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nrqrs" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.736749 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6nml\" (UniqueName: \"kubernetes.io/projected/3d0fc4e8-1c1f-4f4a-8dd2-eaf0c3f7b294-kube-api-access-s6nml\") pod \"downloads-7954f5f757-ztk92\" (UID: \"3d0fc4e8-1c1f-4f4a-8dd2-eaf0c3f7b294\") " pod="openshift-console/downloads-7954f5f757-ztk92" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.738407 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tzzf\" (UniqueName: \"kubernetes.io/projected/1cd84235-0b8b-43a0-8d10-6324b5759eac-kube-api-access-7tzzf\") pod \"packageserver-d55dfcdfc-z6m7g\" (UID: \"1cd84235-0b8b-43a0-8d10-6324b5759eac\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-z6m7g" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.739008 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-4lmpq" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.741164 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmtqq\" (UniqueName: \"kubernetes.io/projected/06f990d2-0043-4e1c-9a1d-34c70bc123d4-kube-api-access-bmtqq\") pod \"openshift-controller-manager-operator-756b6f6bc6-5k9nv\" (UID: \"06f990d2-0043-4e1c-9a1d-34c70bc123d4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5k9nv" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.742983 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2q6l6\" (UniqueName: \"kubernetes.io/projected/0c3066a4-fecf-4608-be24-c0534bd263cc-kube-api-access-2q6l6\") pod \"service-ca-9c57cc56f-j8hd5\" (UID: \"0c3066a4-fecf-4608-be24-c0534bd263cc\") " pod="openshift-service-ca/service-ca-9c57cc56f-j8hd5" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.743650 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:03 crc kubenswrapper[4956]: E1211 21:50:03.743977 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:04.243966614 +0000 UTC m=+96.688344764 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.745617 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-d8mkh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.746091 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d3311546-a763-4f88-87c2-ea9dc6c5d023-bound-sa-token\") pod \"ingress-operator-5b745b69d9-b28md\" (UID: \"d3311546-a763-4f88-87c2-ea9dc6c5d023\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b28md" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.750275 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/364d355b-96bc-4ce2-9734-6758414934fc-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-8tg54\" (UID: \"364d355b-96bc-4ce2-9734-6758414934fc\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8tg54" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.756106 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trnxc\" (UniqueName: \"kubernetes.io/projected/78d9268c-ae46-4117-8674-2a7d107831bd-kube-api-access-trnxc\") pod \"dns-operator-744455d44c-hsjmq\" (UID: \"78d9268c-ae46-4117-8674-2a7d107831bd\") " pod="openshift-dns-operator/dns-operator-744455d44c-hsjmq" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.757414 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pw7f\" (UniqueName: \"kubernetes.io/projected/6dae87cb-e091-408e-9b9d-4d45e7797fc5-kube-api-access-7pw7f\") pod \"oauth-openshift-558db77b4-hq2jl\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") " pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.777508 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-j8hd5" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.780819 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lg4p\" (UniqueName: \"kubernetes.io/projected/c53073d9-6cbf-4e29-95cb-88254007d7d4-kube-api-access-6lg4p\") pod \"dns-default-6nc5z\" (UID: \"c53073d9-6cbf-4e29-95cb-88254007d7d4\") " pod="openshift-dns/dns-default-6nc5z" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.817279 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7d63c8a9-8aed-4a38-a43c-ea5c95e6a59f-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-6pn79\" (UID: \"7d63c8a9-8aed-4a38-a43c-ea5c95e6a59f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6pn79" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.825706 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgwpf\" (UniqueName: \"kubernetes.io/projected/6dfd68e7-13f2-49a6-9304-a56d7fb6b2d3-kube-api-access-hgwpf\") pod \"machine-config-server-gkxnv\" (UID: \"6dfd68e7-13f2-49a6-9304-a56d7fb6b2d3\") " pod="openshift-machine-config-operator/machine-config-server-gkxnv" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.826443 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9z76\" (UniqueName: \"kubernetes.io/projected/ce0f76b3-4ba2-4981-a888-8f659c504f6c-kube-api-access-j9z76\") pod \"catalog-operator-68c6474976-lrprk\" (UID: \"ce0f76b3-4ba2-4981-a888-8f659c504f6c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lrprk" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.826975 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wllvv\" (UniqueName: \"kubernetes.io/projected/37ac4d54-fa49-4866-96c5-fcc954e9d3e6-kube-api-access-wllvv\") pod \"olm-operator-6b444d44fb-9b85x\" (UID: \"37ac4d54-fa49-4866-96c5-fcc954e9d3e6\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9b85x" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.828113 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6pn79" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.838720 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5k9nv" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.844757 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9b85x" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.844932 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:03 crc kubenswrapper[4956]: E1211 21:50:03.845149 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-11 21:50:04.345121543 +0000 UTC m=+96.789499753 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.845373 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:03 crc kubenswrapper[4956]: E1211 21:50:03.845813 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:04.345796171 +0000 UTC m=+96.790174321 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.860930 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-6nc5z" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.863070 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-gkxnv" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.877279 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8tg54" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.889837 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-ztk92" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.913135 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vzvmj" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.915055 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spnwm\" (UniqueName: \"kubernetes.io/projected/e2f7854d-11cc-4a65-977c-8a1570116842-kube-api-access-spnwm\") pod \"ingress-canary-7phqq\" (UID: \"e2f7854d-11cc-4a65-977c-8a1570116842\") " pod="openshift-ingress-canary/ingress-canary-7phqq" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.920251 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7nmv\" (UniqueName: \"kubernetes.io/projected/2d03e7cc-e66c-4be7-a167-6e8619011299-kube-api-access-v7nmv\") pod \"migrator-59844c95c7-7fvb4\" (UID: \"2d03e7cc-e66c-4be7-a167-6e8619011299\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7fvb4" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.923382 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnn9w\" (UniqueName: \"kubernetes.io/projected/ef684a3a-493f-4116-ae57-a0e732765982-kube-api-access-mnn9w\") pod \"kube-storage-version-migrator-operator-b67b599dd-6f6vb\" (UID: \"ef684a3a-493f-4116-ae57-a0e732765982\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6f6vb" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.927564 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfs6w\" (UniqueName: \"kubernetes.io/projected/0e804c82-ec6a-4d08-bd18-d50942c0d985-kube-api-access-kfs6w\") pod \"csi-hostpathplugin-nh95j\" (UID: \"0e804c82-ec6a-4d08-bd18-d50942c0d985\") " pod="hostpath-provisioner/csi-hostpathplugin-nh95j" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.927858 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvnsf\" (UniqueName: \"kubernetes.io/projected/998b11ed-322d-49b3-9a3a-79474037d6ea-kube-api-access-fvnsf\") pod \"console-f9d7485db-jkrgw\" (UID: \"998b11ed-322d-49b3-9a3a-79474037d6ea\") " pod="openshift-console/console-f9d7485db-jkrgw" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.946432 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:03 crc kubenswrapper[4956]: E1211 21:50:03.946609 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:04.4465836 +0000 UTC m=+96.890961750 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.946807 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:03 crc kubenswrapper[4956]: E1211 21:50:03.947149 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:04.447136076 +0000 UTC m=+96.891514226 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.949334 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtchc\" (UniqueName: \"kubernetes.io/projected/2d8a22dd-465c-4327-8d76-782e5d289942-kube-api-access-dtchc\") pod \"control-plane-machine-set-operator-78cbb6b69f-wnxdh\" (UID: \"2d8a22dd-465c-4327-8d76-782e5d289942\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wnxdh" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.953791 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b28md" Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.980932 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:03 crc kubenswrapper[4956]: W1211 21:50:03.990004 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6dfd68e7_13f2_49a6_9304_a56d7fb6b2d3.slice/crio-2d99f235ae2748ccf6ce2c9bc5240c1fb4c0975626cbe7c4b272e4f86c30adb8 WatchSource:0}: Error finding container 2d99f235ae2748ccf6ce2c9bc5240c1fb4c0975626cbe7c4b272e4f86c30adb8: Status 404 returned error can't find the container with id 2d99f235ae2748ccf6ce2c9bc5240c1fb4c0975626cbe7c4b272e4f86c30adb8 Dec 11 21:50:03 crc kubenswrapper[4956]: I1211 21:50:03.998573 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-hsjmq" Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.005313 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-nrqrs" Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.010624 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qt4wm\" (UniqueName: \"kubernetes.io/projected/b39bb8fb-c84a-48cc-aa65-b992c06a090b-kube-api-access-qt4wm\") pod \"collect-profiles-29424825-fsrlm\" (UID: \"b39bb8fb-c84a-48cc-aa65-b992c06a090b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424825-fsrlm" Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.024186 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-z6m7g" Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.042236 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nl2dd\" (UniqueName: \"kubernetes.io/projected/20336def-d6ab-4203-8957-629a61fec0a7-kube-api-access-nl2dd\") pod \"package-server-manager-789f6589d5-8r689\" (UID: \"20336def-d6ab-4203-8957-629a61fec0a7\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8r689" Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.045934 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9222bc8f-bd0e-40f3-be61-07b4f951adae-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-n77qx\" (UID: \"9222bc8f-bd0e-40f3-be61-07b4f951adae\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n77qx" Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.049335 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:04 crc kubenswrapper[4956]: E1211 21:50:04.049659 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:04.549638921 +0000 UTC m=+96.994017071 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.060013 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8r689" Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.069696 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lrprk" Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.088187 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6f6vb" Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.096275 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424825-fsrlm" Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.105791 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wnxdh" Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.109448 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7fvb4" Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.119613 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n77qx" Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.138953 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-jkrgw" Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.145630 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-tmhkw" podStartSLOduration=77.145615504 podStartE2EDuration="1m17.145615504s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:04.144620307 +0000 UTC m=+96.588998457" watchObservedRunningTime="2025-12-11 21:50:04.145615504 +0000 UTC m=+96.589993644" Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.150203 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:04 crc kubenswrapper[4956]: E1211 21:50:04.150594 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:04.650569614 +0000 UTC m=+97.094947804 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.168088 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-7phqq" Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.182212 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=19.182197523 podStartE2EDuration="19.182197523s" podCreationTimestamp="2025-12-11 21:49:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:04.180280781 +0000 UTC m=+96.624658931" watchObservedRunningTime="2025-12-11 21:50:04.182197523 +0000 UTC m=+96.626575673" Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.187321 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-nh95j" Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.250594 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:04 crc kubenswrapper[4956]: E1211 21:50:04.250945 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:04.750926793 +0000 UTC m=+97.195304943 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.258926 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbrht\" (UniqueName: \"kubernetes.io/projected/cf15492b-35b0-42ad-a13d-540ccaa7dc23-kube-api-access-sbrht\") pod \"machine-config-operator-74547568cd-56fs7\" (UID: \"cf15492b-35b0-42ad-a13d-540ccaa7dc23\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-56fs7" Dec 11 21:50:04 crc kubenswrapper[4956]: W1211 21:50:04.278332 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod364d355b_96bc_4ce2_9734_6758414934fc.slice/crio-8be2dd785af5014ecc080db8632fda21dff3353f860cdae053fde84c34218c3c WatchSource:0}: Error finding container 8be2dd785af5014ecc080db8632fda21dff3353f860cdae053fde84c34218c3c: Status 404 returned error can't find the container with id 8be2dd785af5014ecc080db8632fda21dff3353f860cdae053fde84c34218c3c Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.296762 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-fbpg7" event={"ID":"1f36b124-c397-4935-82b6-191d83292d1b","Type":"ContainerStarted","Data":"164a71b4cdf0ed4f4c931ebd1731a06ace0c6132da8a0301e11d6a2312196180"} Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.298095 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9m9dx" event={"ID":"8c2ab16f-cc2d-4319-ac87-974565b63c6e","Type":"ContainerStarted","Data":"290c78740a3a07c70a12a8ae2cda54981a2f9c7935823c7df0fb7b45fddf291d"} Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.299223 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8tg54" event={"ID":"364d355b-96bc-4ce2-9734-6758414934fc","Type":"ContainerStarted","Data":"8be2dd785af5014ecc080db8632fda21dff3353f860cdae053fde84c34218c3c"} Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.300314 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-gkxnv" event={"ID":"6dfd68e7-13f2-49a6-9304-a56d7fb6b2d3","Type":"ContainerStarted","Data":"2d99f235ae2748ccf6ce2c9bc5240c1fb4c0975626cbe7c4b272e4f86c30adb8"} Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.353686 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-56fs7" Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.354565 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:04 crc kubenswrapper[4956]: E1211 21:50:04.354884 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:04.854870956 +0000 UTC m=+97.299249106 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.461685 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:04 crc kubenswrapper[4956]: E1211 21:50:04.462265 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:04.962251082 +0000 UTC m=+97.406629222 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.563310 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:04 crc kubenswrapper[4956]: E1211 21:50:04.563726 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:05.063711899 +0000 UTC m=+97.508090059 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.664046 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:04 crc kubenswrapper[4956]: E1211 21:50:04.664325 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:05.164298763 +0000 UTC m=+97.608676923 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.767085 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:04 crc kubenswrapper[4956]: E1211 21:50:04.767516 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:05.267501907 +0000 UTC m=+97.711880057 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.868381 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:04 crc kubenswrapper[4956]: E1211 21:50:04.868628 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:05.368613876 +0000 UTC m=+97.812992026 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:04 crc kubenswrapper[4956]: I1211 21:50:04.969298 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:04 crc kubenswrapper[4956]: E1211 21:50:04.969637 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:05.469626612 +0000 UTC m=+97.914004762 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.079611 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:05 crc kubenswrapper[4956]: E1211 21:50:05.080061 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:05.580046617 +0000 UTC m=+98.024424767 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.181437 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:05 crc kubenswrapper[4956]: E1211 21:50:05.181930 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:05.681914316 +0000 UTC m=+98.126292466 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.207947 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=3.207928514 podStartE2EDuration="3.207928514s" podCreationTimestamp="2025-12-11 21:50:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:05.205185152 +0000 UTC m=+97.649563332" watchObservedRunningTime="2025-12-11 21:50:05.207928514 +0000 UTC m=+97.652306664" Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.282494 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:05 crc kubenswrapper[4956]: E1211 21:50:05.282639 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:05.782609593 +0000 UTC m=+98.226987753 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.282995 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:05 crc kubenswrapper[4956]: E1211 21:50:05.283353 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:05.783338242 +0000 UTC m=+98.227716392 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.305639 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8tg54" event={"ID":"364d355b-96bc-4ce2-9734-6758414934fc","Type":"ContainerStarted","Data":"a16b8adcdb0bd9c0ea238f91d6ffd06f39eaf5f1ee93fede049babadca6c3c5b"} Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.308439 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-gkxnv" event={"ID":"6dfd68e7-13f2-49a6-9304-a56d7fb6b2d3","Type":"ContainerStarted","Data":"8c4408de17582149a08a0a0dbf8e1a98c3a113843465db18db3b7f20091fc527"} Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.311249 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-fbpg7" event={"ID":"1f36b124-c397-4935-82b6-191d83292d1b","Type":"ContainerStarted","Data":"7b8ff33d4ab26d3ac27e0c4e69961d91c9dcbd18a360e3cc9eed14fcf037cd28"} Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.313683 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9m9dx" event={"ID":"8c2ab16f-cc2d-4319-ac87-974565b63c6e","Type":"ContainerStarted","Data":"09bc39e082c08aef4c184ba0bf925d58e6f56b9f3d803cb681aed12d2f62fba5"} Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.313711 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9m9dx" event={"ID":"8c2ab16f-cc2d-4319-ac87-974565b63c6e","Type":"ContainerStarted","Data":"f30acf770a2de67244a997c7668031e4b7b5f15b2a1b6b90834b8bd4fb1165f9"} Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.383664 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:05 crc kubenswrapper[4956]: E1211 21:50:05.384202 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:05.884175543 +0000 UTC m=+98.328553693 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.384294 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:05 crc kubenswrapper[4956]: E1211 21:50:05.385645 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:05.885630542 +0000 UTC m=+98.330008772 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.486980 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:05 crc kubenswrapper[4956]: E1211 21:50:05.487361 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:05.987327036 +0000 UTC m=+98.431705186 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.588322 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:05 crc kubenswrapper[4956]: E1211 21:50:05.591484 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:06.091467475 +0000 UTC m=+98.535845625 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.653263 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-9g9fs"] Dec 11 21:50:05 crc kubenswrapper[4956]: W1211 21:50:05.655924 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc04a25ee_01c0_4fed_b4c5_a9984606786a.slice/crio-45f949d4ad353ad95ca06f94db65bb922d2a3746cce4a2f8b1d68a41be0464e8 WatchSource:0}: Error finding container 45f949d4ad353ad95ca06f94db65bb922d2a3746cce4a2f8b1d68a41be0464e8: Status 404 returned error can't find the container with id 45f949d4ad353ad95ca06f94db65bb922d2a3746cce4a2f8b1d68a41be0464e8 Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.665595 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d"] Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.718977 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-fbpg7" Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.719553 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:05 crc kubenswrapper[4956]: E1211 21:50:05.719749 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-11 21:50:06.219720882 +0000 UTC m=+98.664099032 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.719918 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.720027 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/534554e4-788d-4649-9dfc-ab5fd83d37d9-metrics-certs\") pod \"network-metrics-daemon-fgzkb\" (UID: \"534554e4-788d-4649-9dfc-ab5fd83d37d9\") " pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:50:05 crc kubenswrapper[4956]: E1211 21:50:05.721376 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:06.221367896 +0000 UTC m=+98.665746046 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.724734 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-zrrsn"] Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.733892 4956 patch_prober.go:28] interesting pod/router-default-5444994796-fbpg7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 21:50:05 crc kubenswrapper[4956]: [-]has-synced failed: reason withheld Dec 11 21:50:05 crc kubenswrapper[4956]: [+]process-running ok Dec 11 21:50:05 crc kubenswrapper[4956]: healthz check failed Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.733950 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fbpg7" podUID="1f36b124-c397-4935-82b6-191d83292d1b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.737157 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/534554e4-788d-4649-9dfc-ab5fd83d37d9-metrics-certs\") pod \"network-metrics-daemon-fgzkb\" (UID: 
\"534554e4-788d-4649-9dfc-ab5fd83d37d9\") " pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.743920 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw"] Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.747548 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-jzbdh"] Dec 11 21:50:05 crc kubenswrapper[4956]: W1211 21:50:05.764318 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb7758ba9_d0df_47cc_a703_d69e4e001adf.slice/crio-7ba8e8fba19889e989d1ec210e3c78b5d4f83df920e297301ef4e65546f8d269 WatchSource:0}: Error finding container 7ba8e8fba19889e989d1ec210e3c78b5d4f83df920e297301ef4e65546f8d269: Status 404 returned error can't find the container with id 7ba8e8fba19889e989d1ec210e3c78b5d4f83df920e297301ef4e65546f8d269 Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.820697 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:05 crc kubenswrapper[4956]: E1211 21:50:05.821414 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:06.321387865 +0000 UTC m=+98.765766015 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.821727 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:05 crc kubenswrapper[4956]: E1211 21:50:05.822100 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:06.322083644 +0000 UTC m=+98.766461794 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.831475 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9dkxr"] Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.836678 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7pmjk"] Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.871818 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-8tg54" podStartSLOduration=78.87179996 podStartE2EDuration="1m18.87179996s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:05.869401188 +0000 UTC m=+98.313779338" watchObservedRunningTime="2025-12-11 21:50:05.87179996 +0000 UTC m=+98.316178110" Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.908685 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-fbpg7" podStartSLOduration=78.908665128 podStartE2EDuration="1m18.908665128s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:05.908153703 +0000 UTC m=+98.352531853" watchObservedRunningTime="2025-12-11 21:50:05.908665128 +0000 UTC m=+98.353043278" Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.923042 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:05 crc kubenswrapper[4956]: E1211 21:50:05.923244 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:06.423220253 +0000 UTC m=+98.867598403 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.923283 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:05 crc kubenswrapper[4956]: E1211 21:50:05.923632 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:06.423620013 +0000 UTC m=+98.867998163 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.948584 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-gkxnv" podStartSLOduration=5.948566234 podStartE2EDuration="5.948566234s" podCreationTimestamp="2025-12-11 21:50:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:05.946565922 +0000 UTC m=+98.390944072" watchObservedRunningTime="2025-12-11 21:50:05.948566234 +0000 UTC m=+98.392944384" Dec 11 21:50:05 crc kubenswrapper[4956]: I1211 21:50:05.949832 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-fgzkb" Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.005461 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wm8v7"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.014668 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9m9dx" podStartSLOduration=80.014641354 podStartE2EDuration="1m20.014641354s" podCreationTimestamp="2025-12-11 21:48:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:05.999213966 +0000 UTC m=+98.443592126" watchObservedRunningTime="2025-12-11 21:50:06.014641354 +0000 UTC m=+98.459019514" Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.024342 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:06 crc kubenswrapper[4956]: E1211 21:50:06.024474 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:06.524457214 +0000 UTC m=+98.968835364 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.025087 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:06 crc kubenswrapper[4956]: E1211 21:50:06.025347 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:06.525337578 +0000 UTC m=+98.969715728 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:06 crc kubenswrapper[4956]: W1211 21:50:06.027035 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddf97aa52_8dc9_46d3_932e_545b1c736c9b.slice/crio-030a5f2135be38b02aca808e4a857337c2968c15c1afee14deeac742701c8744 WatchSource:0}: Error finding container 030a5f2135be38b02aca808e4a857337c2968c15c1afee14deeac742701c8744: Status 404 returned error can't find the container with id 030a5f2135be38b02aca808e4a857337c2968c15c1afee14deeac742701c8744 Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.054684 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-dkst5"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.065936 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9b85x"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.065991 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-4mlhz"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.066001 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5k9nv"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.077316 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-6nc5z"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.081140 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lrprk"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.086583 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-4lmpq"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.087975 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-f4wrs"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.092906 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-b28md"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.095998 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-j8hd5"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.102579 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-zvhmh"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.127641 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:06 crc kubenswrapper[4956]: E1211 21:50:06.128301 4956 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:06.628282416 +0000 UTC m=+99.072660566 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.140260 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6pn79"] Dec 11 21:50:06 crc kubenswrapper[4956]: W1211 21:50:06.182984 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2ca85300_63e6_412c_917b_df0c8696dfda.slice/crio-220188a541fcd0ef298be26bc7d3aac1b7421f3ba47e4015a45c550b46213715 WatchSource:0}: Error finding container 220188a541fcd0ef298be26bc7d3aac1b7421f3ba47e4015a45c550b46213715: Status 404 returned error can't find the container with id 220188a541fcd0ef298be26bc7d3aac1b7421f3ba47e4015a45c550b46213715 Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.229267 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:06 crc kubenswrapper[4956]: E1211 21:50:06.229627 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:06.729615739 +0000 UTC m=+99.173993889 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.323733 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-4lmpq" event={"ID":"2ca85300-63e6-412c-917b-df0c8696dfda","Type":"ContainerStarted","Data":"220188a541fcd0ef298be26bc7d3aac1b7421f3ba47e4015a45c550b46213715"} Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.328576 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-4mlhz" event={"ID":"45c3a183-07ab-4339-92b3-97eac03e9601","Type":"ContainerStarted","Data":"a968b7bdfc52c0e4f1f89fbc749a4954ec971b3216773fbdac655e7c49ec812f"} Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.330142 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:06 crc kubenswrapper[4956]: E1211 21:50:06.330625 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:06.830607595 +0000 UTC m=+99.274985755 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.331525 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh" event={"ID":"0190a0a5-2358-4044-b766-f164e0124dab","Type":"ContainerStarted","Data":"adc3e2446fec9da62bec98bcebcb05036b67a7fc349b26e69200d60ba84aa57d"} Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.338356 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b28md" event={"ID":"d3311546-a763-4f88-87c2-ea9dc6c5d023","Type":"ContainerStarted","Data":"d3e6bd92292a6d4dcfba60ac98ba1d6b10bcd65960ad650005ab11d4af38b7c3"} Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.368763 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-j8hd5" event={"ID":"0c3066a4-fecf-4608-be24-c0534bd263cc","Type":"ContainerStarted","Data":"a1c1e02039a215374e3e99992f45cb13935c6fa7997d58dc9d00ff4c0e2b374f"} Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.378106 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-zrrsn" event={"ID":"d44ce400-a741-4232-897e-f9e50bc0f894","Type":"ContainerStarted","Data":"3eb9cdbc5a8c50dc9965917ec58190a0a66bc0ead0ef8b7fd6d444d5a61e6a25"} Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.378161 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-zrrsn" event={"ID":"d44ce400-a741-4232-897e-f9e50bc0f894","Type":"ContainerStarted","Data":"42c3f5877edd32c0eee599e74b97d3c4462149de907cb30c4b2a66eba9d4d0e7"} Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.378173 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424825-fsrlm"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.401835 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-z6m7g"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.402156 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-zrrsn" podStartSLOduration=79.402137489 podStartE2EDuration="1m19.402137489s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:06.398626737 +0000 UTC m=+98.843004887" watchObservedRunningTime="2025-12-11 21:50:06.402137489 +0000 UTC m=+98.846515639" Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.407318 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-6nc5z" event={"ID":"c53073d9-6cbf-4e29-95cb-88254007d7d4","Type":"ContainerStarted","Data":"eb405cc1e6073fdc21a86cd0eb70ef294b62e466d4ae868ca42b5c0d667a0820"} Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.409406 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-marketplace/marketplace-operator-79b997595-d8mkh"] Dec 11 21:50:06 crc kubenswrapper[4956]: W1211 21:50:06.409597 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb39bb8fb_c84a_48cc_aa65_b992c06a090b.slice/crio-8acea8f51dbfa931156b719a185bdd338f0fef629f00e121efc601b0046318b3 WatchSource:0}: Error finding container 8acea8f51dbfa931156b719a185bdd338f0fef629f00e121efc601b0046318b3: Status 404 returned error can't find the container with id 8acea8f51dbfa931156b719a185bdd338f0fef629f00e121efc601b0046318b3 Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.418120 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6pn79" event={"ID":"7d63c8a9-8aed-4a38-a43c-ea5c95e6a59f","Type":"ContainerStarted","Data":"2e1dfff2da5e8a152fb1432636c752b334ffa11a548c531ca57776f1e2f86531"} Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.431351 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.432151 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lrprk" event={"ID":"ce0f76b3-4ba2-4981-a888-8f659c504f6c","Type":"ContainerStarted","Data":"cd82545a18208fc8419a4a384abdf3f423fa03a0d790e233a870b133c4a4f239"} Dec 11 21:50:06 crc kubenswrapper[4956]: E1211 21:50:06.432901 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:06.932888424 +0000 UTC m=+99.377266574 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.461220 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" event={"ID":"3b1e309e-6542-43b9-95cc-3197be39a203","Type":"ContainerStarted","Data":"4dfcdf7f33513692e0cdadf4c6a7b1fe0cb4bbbffc65af91a42a891673628e01"} Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.466347 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-7fvb4"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.475723 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-56fs7"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.478827 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2mg62"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.491338 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wnxdh"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.500538 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6f6vb"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.508094 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-nh95j"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.508136 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-ztk92"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.508146 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n77qx"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.509678 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-jkrgw"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.510724 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d" event={"ID":"fab9c443-9e80-4943-bfa0-0902f4377230","Type":"ContainerStarted","Data":"f7e4ab5e07465cabeac5e8a677a0da486086d8bf4942ad724ecdc45194c7fdbe"} Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.510781 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d" event={"ID":"fab9c443-9e80-4943-bfa0-0902f4377230","Type":"ContainerStarted","Data":"57d97e71eb3e12c4f6e045eca90dc55e551a22a750f68870c2085b8ca0226557"} Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.511870 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d" Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.535804 4956 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:06 crc kubenswrapper[4956]: E1211 21:50:06.536981 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:07.036964391 +0000 UTC m=+99.481342541 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.537364 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7pmjk" event={"ID":"b058f1ae-0331-46f7-a1e8-339dbf9a9405","Type":"ContainerStarted","Data":"9e84d74b9e623537459d39b614e3c6b6c9054716b8a4a548b0e9f034e9a24db1"} Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.543287 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jzbdh" event={"ID":"b7758ba9-d0df-47cc-a703-d69e4e001adf","Type":"ContainerStarted","Data":"5e1cafbe07303dc4748176f267a7a61e92b393e7b18c9c2644564d1198c01bbc"} Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.543319 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jzbdh" event={"ID":"b7758ba9-d0df-47cc-a703-d69e4e001adf","Type":"ContainerStarted","Data":"8c3f40f1615f3b57733a22222d2c23cdf7a0440adc2ba81ec175b3a6e8c12c79"} Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.543328 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jzbdh" event={"ID":"b7758ba9-d0df-47cc-a703-d69e4e001adf","Type":"ContainerStarted","Data":"7ba8e8fba19889e989d1ec210e3c78b5d4f83df920e297301ef4e65546f8d269"} Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.545605 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d" podStartSLOduration=79.54559283 podStartE2EDuration="1m19.54559283s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:06.537214228 +0000 UTC m=+98.981592378" watchObservedRunningTime="2025-12-11 21:50:06.54559283 +0000 UTC m=+98.989970980" Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.550851 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-9g9fs" event={"ID":"c04a25ee-01c0-4fed-b4c5-a9984606786a","Type":"ContainerStarted","Data":"7e9d3aa61792989984eaa8b1e19824b08ce3ccd121437584743419f1db9b98a5"} Dec 11 21:50:06 crc kubenswrapper[4956]: 
I1211 21:50:06.550909 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-9g9fs" event={"ID":"c04a25ee-01c0-4fed-b4c5-a9984606786a","Type":"ContainerStarted","Data":"45f949d4ad353ad95ca06f94db65bb922d2a3746cce4a2f8b1d68a41be0464e8"} Dec 11 21:50:06 crc kubenswrapper[4956]: W1211 21:50:06.557373 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2d03e7cc_e66c_4be7_a167_6e8619011299.slice/crio-b0313a053384fdce4cec7cd7be00b426301c183d78836a2d54b8c0184348f0ac WatchSource:0}: Error finding container b0313a053384fdce4cec7cd7be00b426301c183d78836a2d54b8c0184348f0ac: Status 404 returned error can't find the container with id b0313a053384fdce4cec7cd7be00b426301c183d78836a2d54b8c0184348f0ac Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.559001 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7pmjk" podStartSLOduration=79.558985154 podStartE2EDuration="1m19.558985154s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:06.557048653 +0000 UTC m=+99.001426823" watchObservedRunningTime="2025-12-11 21:50:06.558985154 +0000 UTC m=+99.003363304" Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.560588 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9b85x" event={"ID":"37ac4d54-fa49-4866-96c5-fcc954e9d3e6","Type":"ContainerStarted","Data":"7449a80eb4630f45ba9f73a2822c194ecade225439430147fe7b7e5894a47688"} Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.561253 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9b85x" Dec 11 21:50:06 crc kubenswrapper[4956]: W1211 21:50:06.564585 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef684a3a_493f_4116_ae57_a0e732765982.slice/crio-f17ecf2c7696af0c46a08042b16d39dfc9a867e8bc796687124659d05ee85c63 WatchSource:0}: Error finding container f17ecf2c7696af0c46a08042b16d39dfc9a867e8bc796687124659d05ee85c63: Status 404 returned error can't find the container with id f17ecf2c7696af0c46a08042b16d39dfc9a867e8bc796687124659d05ee85c63 Dec 11 21:50:06 crc kubenswrapper[4956]: W1211 21:50:06.569487 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcf15492b_35b0_42ad_a13d_540ccaa7dc23.slice/crio-0e1fb5960574c6942236e517350109ad891d94849bd86554dfb9d5156d3a8f8e WatchSource:0}: Error finding container 0e1fb5960574c6942236e517350109ad891d94849bd86554dfb9d5156d3a8f8e: Status 404 returned error can't find the container with id 0e1fb5960574c6942236e517350109ad891d94849bd86554dfb9d5156d3a8f8e Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.581004 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wm8v7" event={"ID":"df97aa52-8dc9-46d3-932e-545b1c736c9b","Type":"ContainerStarted","Data":"030a5f2135be38b02aca808e4a857337c2968c15c1afee14deeac742701c8744"} Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.581531 4956 patch_prober.go:28] interesting 
pod/olm-operator-6b444d44fb-9b85x container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.42:8443/healthz\": dial tcp 10.217.0.42:8443: connect: connection refused" start-of-body= Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.581561 4956 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9b85x" podUID="37ac4d54-fa49-4866-96c5-fcc954e9d3e6" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.42:8443/healthz\": dial tcp 10.217.0.42:8443: connect: connection refused" Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.584653 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jzbdh" podStartSLOduration=79.584638735 podStartE2EDuration="1m19.584638735s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:06.582013855 +0000 UTC m=+99.026392035" watchObservedRunningTime="2025-12-11 21:50:06.584638735 +0000 UTC m=+99.029016885" Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.592511 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" event={"ID":"fe381376-cd51-4565-be0a-1fd8a77be7ac","Type":"ContainerStarted","Data":"c8acf522ed7eaa16629ca40a45d4576064c0c871bba287b5730eda5beaef5e1d"} Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.593684 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" event={"ID":"fe381376-cd51-4565-be0a-1fd8a77be7ac","Type":"ContainerStarted","Data":"ceab975f427b215d6ed58e61d706d7f7f91843e1265d11b082008e83b52e9bc6"} Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.609148 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9dkxr" event={"ID":"05f4ef31-1e96-4627-ab11-cc326d624062","Type":"ContainerStarted","Data":"ae27bd87f737e19aad3c9d02c60a0867e7283571ee870d7c96f938d09dda1e0c"} Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.609315 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9dkxr" event={"ID":"05f4ef31-1e96-4627-ab11-cc326d624062","Type":"ContainerStarted","Data":"6f18857d16a805ca1474b8e00e6eab4b08f77e706fcede4f442922fa3c9463f8"} Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.645756 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:06 crc kubenswrapper[4956]: E1211 21:50:06.648255 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:07.148238319 +0000 UTC m=+99.592616539 (durationBeforeRetry 500ms). 
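Every failed mount and unmount in this stretch shares one root cause: the kubevirt.io.hostpath-provisioner CSI driver has not yet registered with this kubelet, so no CSI client can be built for the volume. CSI node plugins announce themselves to the kubelet over a plugin-registration socket, and until that handshake happens every lookup in the registered-driver list fails and the operation is requeued. Below is a minimal Go sketch of that lookup behavior; the type, method, and socket path are illustrative stand-ins, not kubelet internals.

    package main

    import (
    	"fmt"
    	"sync"
    )

    // csiRegistry is a toy stand-in for the kubelet's map of registered
    // CSI drivers; the real registry is populated via the plugin-
    // registration socket when the node plugin comes up.
    type csiRegistry struct {
    	mu      sync.RWMutex
    	drivers map[string]string // driver name -> plugin socket (illustrative)
    }

    func (r *csiRegistry) client(name string) (string, error) {
    	r.mu.RLock()
    	defer r.mu.RUnlock()
    	sock, ok := r.drivers[name]
    	if !ok {
    		// Same failure shape as the log: the driver simply is not
    		// registered yet, so no client can be constructed.
    		return "", fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
    	}
    	return sock, nil
    }

    func main() {
    	reg := &csiRegistry{drivers: map[string]string{}}
    	if _, err := reg.client("kubevirt.io.hostpath-provisioner"); err != nil {
    		fmt.Println("mount attempt fails:", err)
    	}
    	// Once the plugin registers, the identical lookup succeeds and the
    	// retry loop drains. The socket path here is hypothetical.
    	reg.mu.Lock()
    	reg.drivers["kubevirt.io.hostpath-provisioner"] = "/var/lib/kubelet/plugins/csi-hostpath/csi.sock"
    	reg.mu.Unlock()
    	if sock, err := reg.client("kubevirt.io.hostpath-provisioner"); err == nil {
    		fmt.Println("driver registered at:", sock)
    	}
    }

The csi-hostpathplugin-nh95j pod does report ContainerStarted near the end of this excerpt, which is presumably what eventually clears these retries.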
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.653310 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-9g9fs" podStartSLOduration=80.653292433 podStartE2EDuration="1m20.653292433s" podCreationTimestamp="2025-12-11 21:48:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:06.602864947 +0000 UTC m=+99.047243127" watchObservedRunningTime="2025-12-11 21:50:06.653292433 +0000 UTC m=+99.097670583" Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.662754 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-nrqrs"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.675044 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-vzvmj"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.676888 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-dkst5" event={"ID":"99b3c2ef-0c86-427c-9c97-f4a9221b69b1","Type":"ContainerStarted","Data":"18bc39efe5e822b9beefcfe0ae9f0400af5f5f26badf81fe4ff13225a266c018"} Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.676923 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-dkst5" event={"ID":"99b3c2ef-0c86-427c-9c97-f4a9221b69b1","Type":"ContainerStarted","Data":"06fccfa9eada3e7a2aab2d3e9511144a8406cf6b73aea49a9966a94a821cadc8"} Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.681407 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5k9nv" event={"ID":"06f990d2-0043-4e1c-9a1d-34c70bc123d4","Type":"ContainerStarted","Data":"2d8858b9fc49d84eef664d6c5535382c41e557adad677e42099ef872e905a121"} Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.685524 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wm8v7" podStartSLOduration=79.685509237 podStartE2EDuration="1m19.685509237s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:06.671699931 +0000 UTC m=+99.116078071" watchObservedRunningTime="2025-12-11 21:50:06.685509237 +0000 UTC m=+99.129887387" Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.689537 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-7phqq"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.701676 4956 patch_prober.go:28] interesting pod/router-default-5444994796-fbpg7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http 
failed: reason withheld Dec 11 21:50:06 crc kubenswrapper[4956]: [-]has-synced failed: reason withheld Dec 11 21:50:06 crc kubenswrapper[4956]: [+]process-running ok Dec 11 21:50:06 crc kubenswrapper[4956]: healthz check failed Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.701712 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fbpg7" podUID="1f36b124-c397-4935-82b6-191d83292d1b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.710645 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9b85x" podStartSLOduration=79.710603591 podStartE2EDuration="1m19.710603591s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:06.698655525 +0000 UTC m=+99.143033675" watchObservedRunningTime="2025-12-11 21:50:06.710603591 +0000 UTC m=+99.154981741" Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.712441 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hq2jl"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.730074 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9dkxr" podStartSLOduration=80.730055886 podStartE2EDuration="1m20.730055886s" podCreationTimestamp="2025-12-11 21:48:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:06.726666757 +0000 UTC m=+99.171044907" watchObservedRunningTime="2025-12-11 21:50:06.730055886 +0000 UTC m=+99.174434036" Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.730264 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-fgzkb"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.743212 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-hsjmq"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.746911 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:06 crc kubenswrapper[4956]: E1211 21:50:06.747462 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:07.247442927 +0000 UTC m=+99.691821077 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.755456 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5k9nv" podStartSLOduration=79.755440978 podStartE2EDuration="1m19.755440978s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:06.754245077 +0000 UTC m=+99.198623227" watchObservedRunningTime="2025-12-11 21:50:06.755440978 +0000 UTC m=+99.199819128" Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.782189 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8r689"] Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.847913 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:06 crc kubenswrapper[4956]: E1211 21:50:06.848344 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:07.34833116 +0000 UTC m=+99.792709310 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.861257 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d" Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.970879 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:06 crc kubenswrapper[4956]: E1211 21:50:06.971131 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:07.471099172 +0000 UTC m=+99.915477322 (durationBeforeRetry 500ms). 
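The "Observed pod startup duration" lines decode cleanly: both pull timestamps sit at the zero time (0001-01-01), meaning no image pull was needed, so podStartSLOduration and podStartE2EDuration coincide and reduce to the observed-running timestamp minus podCreationTimestamp. The 79-80s values simply reflect pods created well before this kubelet finished starting. A small Go check of that arithmetic, with timestamps copied from the route-controller-manager tracker line above:

    package main

    import (
    	"fmt"
    	"time"
    )

    // Reproduces podStartSLOduration=79.54559283 from the tracker line:
    // watchObservedRunningTime minus podCreationTimestamp, both from the log.
    func main() {
    	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
    	created, err := time.Parse(layout, "2025-12-11 21:48:47 +0000 UTC")
    	if err != nil {
    		panic(err)
    	}
    	observed, err := time.Parse(layout, "2025-12-11 21:50:06.54559283 +0000 UTC")
    	if err != nil {
    		panic(err)
    	}
    	fmt.Printf("podStartSLOduration=%.8fs\n", observed.Sub(created).Seconds()) // 79.54559283s
    }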
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:06 crc kubenswrapper[4956]: I1211 21:50:06.971247 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:06 crc kubenswrapper[4956]: E1211 21:50:06.972882 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:07.472869899 +0000 UTC m=+99.917248049 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:07 crc kubenswrapper[4956]: I1211 21:50:07.076327 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:07 crc kubenswrapper[4956]: E1211 21:50:07.077082 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:07.577062259 +0000 UTC m=+100.021440409 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:07 crc kubenswrapper[4956]: I1211 21:50:07.188563 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:07 crc kubenswrapper[4956]: E1211 21:50:07.188947 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:07.688932872 +0000 UTC m=+100.133311022 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:07 crc kubenswrapper[4956]: I1211 21:50:07.291264 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:07 crc kubenswrapper[4956]: E1211 21:50:07.291998 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:07.791978732 +0000 UTC m=+100.236356882 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:07 crc kubenswrapper[4956]: I1211 21:50:07.404378 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:07 crc kubenswrapper[4956]: E1211 21:50:07.404738 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:07.904721848 +0000 UTC m=+100.349099998 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:07 crc kubenswrapper[4956]: I1211 21:50:07.508446 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:07 crc kubenswrapper[4956]: E1211 21:50:07.508967 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:08.00895084 +0000 UTC m=+100.453329000 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:07 crc kubenswrapper[4956]: I1211 21:50:07.610450 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:07 crc kubenswrapper[4956]: E1211 21:50:07.610892 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:08.110875909 +0000 UTC m=+100.555254059 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:07 crc kubenswrapper[4956]: I1211 21:50:07.695512 4956 patch_prober.go:28] interesting pod/router-default-5444994796-fbpg7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 21:50:07 crc kubenswrapper[4956]: [-]has-synced failed: reason withheld Dec 11 21:50:07 crc kubenswrapper[4956]: [+]process-running ok Dec 11 21:50:07 crc kubenswrapper[4956]: healthz check failed Dec 11 21:50:07 crc kubenswrapper[4956]: I1211 21:50:07.695707 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fbpg7" podUID="1f36b124-c397-4935-82b6-191d83292d1b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 21:50:07 crc kubenswrapper[4956]: I1211 21:50:07.712593 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:07 crc kubenswrapper[4956]: E1211 21:50:07.713161 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:08.213146498 +0000 UTC m=+100.657524648 (durationBeforeRetry 500ms). 
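The requeue cadence around these failures comes from the volume manager's per-operation backoff: each failure is stamped "No retries permitted until <now + durationBeforeRetry>" and the operation is retried no sooner than that. Every retry visible in this excerpt is 500ms out; the sketch below additionally assumes the conventional doubling-with-a-cap shape for repeated failures, which this log alone does not confirm.

    package main

    import (
    	"fmt"
    	"time"
    )

    // Illustrative backoff schedule in the spirit of the
    // nestedpendingoperations requeue messages. The 500ms base matches the
    // log; the doubling and the 2m cap are assumptions for illustration.
    func retryAfter(attempt int) time.Duration {
    	d := 500 * time.Millisecond << attempt // 500ms, 1s, 2s, ...
    	if maxWait := 2 * time.Minute; d > maxWait {
    		d = maxWait
    	}
    	return d
    }

    func main() {
    	now := time.Now()
    	for attempt := 0; attempt < 5; attempt++ {
    		wait := retryAfter(attempt)
    		fmt.Printf("attempt %d failed; no retries permitted until %s (durationBeforeRetry %s)\n",
    			attempt, now.Add(wait).Format(time.RFC3339Nano), wait)
    		now = now.Add(wait)
    	}
    }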
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:07 crc kubenswrapper[4956]: I1211 21:50:07.755237 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-6nc5z" event={"ID":"c53073d9-6cbf-4e29-95cb-88254007d7d4","Type":"ContainerStarted","Data":"ade69feee41dbd24d4c49a87c033956066d09fafbd718964cc2981d59ef18af8"} Dec 11 21:50:07 crc kubenswrapper[4956]: I1211 21:50:07.785842 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2mg62" event={"ID":"74c4dba3-53de-449c-9360-9ec5d8a00b1d","Type":"ContainerStarted","Data":"d15dc077d981ae721cbfe90ccb3961d7bb8619b242e36c265b04d8c9b1e9c5ae"} Dec 11 21:50:07 crc kubenswrapper[4956]: I1211 21:50:07.801145 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-7phqq" event={"ID":"e2f7854d-11cc-4a65-977c-8a1570116842","Type":"ContainerStarted","Data":"8df4a5c7df3df9adc06395209ad91dd28f2bfad9409528284459ac08212506be"} Dec 11 21:50:07 crc kubenswrapper[4956]: I1211 21:50:07.815284 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-hsjmq" event={"ID":"78d9268c-ae46-4117-8674-2a7d107831bd","Type":"ContainerStarted","Data":"073dd0642339e8feef2a83be0b8bd8816cec73a8488a7ee2b5b2fd5c15e57a57"} Dec 11 21:50:07 crc kubenswrapper[4956]: I1211 21:50:07.815823 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:07 crc kubenswrapper[4956]: E1211 21:50:07.816158 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:08.316148747 +0000 UTC m=+100.760526897 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:07 crc kubenswrapper[4956]: I1211 21:50:07.824167 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-5k9nv" event={"ID":"06f990d2-0043-4e1c-9a1d-34c70bc123d4","Type":"ContainerStarted","Data":"9f2fb5a6f362bac8ad1698a66506240644b2e3c7ace7bb9d4b07dbeb8b49d54d"} Dec 11 21:50:07 crc kubenswrapper[4956]: I1211 21:50:07.836683 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-fgzkb" event={"ID":"534554e4-788d-4649-9dfc-ab5fd83d37d9","Type":"ContainerStarted","Data":"9a65901f844b173b5476b0ff02fd270027c615056df7dbadc5e39e779ac7c968"} Dec 11 21:50:07 crc kubenswrapper[4956]: I1211 21:50:07.840491 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n77qx" event={"ID":"9222bc8f-bd0e-40f3-be61-07b4f951adae","Type":"ContainerStarted","Data":"5f79963d307d9b2835581e898f7ee1b93a0cf86ffc482dfa5eb0ee774db5ce80"} Dec 11 21:50:07 crc kubenswrapper[4956]: I1211 21:50:07.841611 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" event={"ID":"6dae87cb-e091-408e-9b9d-4d45e7797fc5","Type":"ContainerStarted","Data":"11c57e4b7a2ace1261239b7ec8a9c44be21f6f62bd683b7741801610aad96bbe"} Dec 11 21:50:07 crc kubenswrapper[4956]: I1211 21:50:07.855183 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-7pmjk" event={"ID":"b058f1ae-0331-46f7-a1e8-339dbf9a9405","Type":"ContainerStarted","Data":"451ae8b67acc4e407675a51816ca2af52b68d7b02dab251988612041cdbc9b84"} Dec 11 21:50:07 crc kubenswrapper[4956]: I1211 21:50:07.874412 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-z6m7g" event={"ID":"1cd84235-0b8b-43a0-8d10-6324b5759eac","Type":"ContainerStarted","Data":"37d5ab95af11fa6ff4be325285b88967e544824d4755923c2d364f87d3c65058"} Dec 11 21:50:07 crc kubenswrapper[4956]: I1211 21:50:07.889807 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8r689" event={"ID":"20336def-d6ab-4203-8957-629a61fec0a7","Type":"ContainerStarted","Data":"1a6b0394ff66fa1c5fa337a6d8e5bbd9e502a6bb4721a8853341060da34ce115"} Dec 11 21:50:07 crc kubenswrapper[4956]: I1211 21:50:07.916987 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:07 crc kubenswrapper[4956]: E1211 21:50:07.918486 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b 
nodeName:}" failed. No retries permitted until 2025-12-11 21:50:08.418467398 +0000 UTC m=+100.862845548 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:07 crc kubenswrapper[4956]: I1211 21:50:07.928787 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wm8v7" event={"ID":"df97aa52-8dc9-46d3-932e-545b1c736c9b","Type":"ContainerStarted","Data":"8f96003f1be84dead58ea92e5dc5fed46ef4aa172faf06e343823d5865846d2b"} Dec 11 21:50:08 crc kubenswrapper[4956]: I1211 21:50:08.056214 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:08 crc kubenswrapper[4956]: E1211 21:50:08.056489 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:08.556477605 +0000 UTC m=+101.000855755 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:08 crc kubenswrapper[4956]: I1211 21:50:08.159062 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:08 crc kubenswrapper[4956]: E1211 21:50:08.159426 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:08.65940894 +0000 UTC m=+101.103787090 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:08 crc kubenswrapper[4956]: I1211 21:50:08.268448 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:08 crc kubenswrapper[4956]: E1211 21:50:08.268762 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:08.768749607 +0000 UTC m=+101.213127757 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:08 crc kubenswrapper[4956]: I1211 21:50:08.381302 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:08 crc kubenswrapper[4956]: E1211 21:50:08.381708 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:08.881693229 +0000 UTC m=+101.326071379 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:08 crc kubenswrapper[4956]: I1211 21:50:08.382475 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-56fs7" event={"ID":"cf15492b-35b0-42ad-a13d-540ccaa7dc23","Type":"ContainerStarted","Data":"0e1fb5960574c6942236e517350109ad891d94849bd86554dfb9d5156d3a8f8e"} Dec 11 21:50:08 crc kubenswrapper[4956]: I1211 21:50:08.382517 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-jkrgw" event={"ID":"998b11ed-322d-49b3-9a3a-79474037d6ea","Type":"ContainerStarted","Data":"02b3d260cbf8121940e36380bf4d8619dfdcdacfc5e4569dcedf1cb0b953cfcd"} Dec 11 21:50:08 crc kubenswrapper[4956]: I1211 21:50:08.499307 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:08 crc kubenswrapper[4956]: E1211 21:50:08.499642 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:08.999631094 +0000 UTC m=+101.444009244 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:08 crc kubenswrapper[4956]: I1211 21:50:08.600731 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:08 crc kubenswrapper[4956]: E1211 21:50:08.601479 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:09.101461601 +0000 UTC m=+101.545839751 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:08 crc kubenswrapper[4956]: I1211 21:50:08.663180 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-d8mkh" event={"ID":"62802da2-70ad-46d2-bc51-b9bf3e0b6086","Type":"ContainerStarted","Data":"5a24da353f835950292ac6c9b2d0d0d23f020db6ea2a92e9daf1e643fb3b3f28"} Dec 11 21:50:08 crc kubenswrapper[4956]: I1211 21:50:08.663226 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-d8mkh" event={"ID":"62802da2-70ad-46d2-bc51-b9bf3e0b6086","Type":"ContainerStarted","Data":"b9a7f7a6fdbcd24da2e91b63d1eb12dedc81ed30a488d1d10f6ab11d87b6a6bb"} Dec 11 21:50:08 crc kubenswrapper[4956]: I1211 21:50:08.664171 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-d8mkh" Dec 11 21:50:08 crc kubenswrapper[4956]: I1211 21:50:08.667855 4956 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-d8mkh container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/healthz\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Dec 11 21:50:08 crc kubenswrapper[4956]: I1211 21:50:08.667901 4956 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-d8mkh" podUID="62802da2-70ad-46d2-bc51-b9bf3e0b6086" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.19:8080/healthz\": dial tcp 10.217.0.19:8080: connect: connection refused" Dec 11 21:50:08 crc kubenswrapper[4956]: I1211 21:50:08.695919 4956 patch_prober.go:28] interesting pod/router-default-5444994796-fbpg7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 21:50:08 crc kubenswrapper[4956]: [-]has-synced failed: reason withheld Dec 11 21:50:08 crc kubenswrapper[4956]: [+]process-running ok Dec 11 21:50:08 crc kubenswrapper[4956]: healthz check failed Dec 11 21:50:08 crc kubenswrapper[4956]: I1211 21:50:08.695966 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fbpg7" podUID="1f36b124-c397-4935-82b6-191d83292d1b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 21:50:08 crc kubenswrapper[4956]: I1211 21:50:08.701984 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:08 crc kubenswrapper[4956]: E1211 21:50:08.702298 4956 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:09.202285362 +0000 UTC m=+101.646663512 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:08 crc kubenswrapper[4956]: I1211 21:50:08.710197 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9b85x" event={"ID":"37ac4d54-fa49-4866-96c5-fcc954e9d3e6","Type":"ContainerStarted","Data":"05f28a6f126d01eafa8710adfb21a35607702a6070283c8eba7d6ae61557bef6"} Dec 11 21:50:08 crc kubenswrapper[4956]: I1211 21:50:08.782815 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9b85x" Dec 11 21:50:08 crc kubenswrapper[4956]: I1211 21:50:08.795183 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6f6vb" event={"ID":"ef684a3a-493f-4116-ae57-a0e732765982","Type":"ContainerStarted","Data":"f17ecf2c7696af0c46a08042b16d39dfc9a867e8bc796687124659d05ee85c63"} Dec 11 21:50:08 crc kubenswrapper[4956]: I1211 21:50:08.803564 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:08 crc kubenswrapper[4956]: E1211 21:50:08.804595 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:09.304580701 +0000 UTC m=+101.748958851 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:08 crc kubenswrapper[4956]: I1211 21:50:08.904310 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:08 crc kubenswrapper[4956]: E1211 21:50:08.904884 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:09.404874429 +0000 UTC m=+101.849252579 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:08 crc kubenswrapper[4956]: I1211 21:50:08.943325 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vzvmj" event={"ID":"62552ab3-7cb2-4f75-8f3a-75d264a50f66","Type":"ContainerStarted","Data":"6181a65cb1443b5779f498e134ee9847767fac5a953b7c1c3a9a8fdf9c806f7c"} Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.006256 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:09 crc kubenswrapper[4956]: E1211 21:50:09.007057 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:09.507040475 +0000 UTC m=+101.951418625 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.023928 4956 generic.go:334] "Generic (PLEG): container finished" podID="3b1e309e-6542-43b9-95cc-3197be39a203" containerID="55499399a7de8e4b3bcd38d840c1cd6616c011687c1905f079e041b5c8f771f6" exitCode=0
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.023988 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" event={"ID":"3b1e309e-6542-43b9-95cc-3197be39a203","Type":"ContainerDied","Data":"55499399a7de8e4b3bcd38d840c1cd6616c011687c1905f079e041b5c8f771f6"}
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.075472 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh" event={"ID":"0190a0a5-2358-4044-b766-f164e0124dab","Type":"ContainerStarted","Data":"e95fa539a47118b42a190f4d90e6a216803e87b04ad671984e0ae91d92a9b842"}
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.076342 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh"
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.107752 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk"
Dec 11 21:50:09 crc kubenswrapper[4956]: E1211 21:50:09.108113 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:09.608098522 +0000 UTC m=+102.052476672 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.111924 4956 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-zvhmh container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body=
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.111977 4956 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh" podUID="0190a0a5-2358-4044-b766-f164e0124dab" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused"
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.112231 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b28md" event={"ID":"d3311546-a763-4f88-87c2-ea9dc6c5d023","Type":"ContainerStarted","Data":"01bc5a7393d1c1f880e5b4de95c83a9ef820f01e36f46180a947b18da0c2c352"}
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.112263 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b28md" event={"ID":"d3311546-a763-4f88-87c2-ea9dc6c5d023","Type":"ContainerStarted","Data":"804314e3b2ba86051cefdc0cc2d4860eedea9c58fdc5ecd15ef90e252a379007"}
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.150993 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7fvb4" event={"ID":"2d03e7cc-e66c-4be7-a167-6e8619011299","Type":"ContainerStarted","Data":"b0313a053384fdce4cec7cd7be00b426301c183d78836a2d54b8c0184348f0ac"}
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.174530 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-ztk92" event={"ID":"3d0fc4e8-1c1f-4f4a-8dd2-eaf0c3f7b294","Type":"ContainerStarted","Data":"992873ee0a6e733f62db886cd04baf07c22ef74320de8927013ef05c92e6013c"}
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.175307 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-ztk92"
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.186861 4956 patch_prober.go:28] interesting pod/downloads-7954f5f757-ztk92 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body=
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.186914 4956 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-ztk92" podUID="3d0fc4e8-1c1f-4f4a-8dd2-eaf0c3f7b294" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused"
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.189127 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lrprk" event={"ID":"ce0f76b3-4ba2-4981-a888-8f659c504f6c","Type":"ContainerStarted","Data":"ceb1a145d6a4843f2c2d92731daa2ee7ca552d6b81d279129049284fb27a39f2"}
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.190018 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lrprk"
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.201437 4956 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-lrprk container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" start-of-body=
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.201474 4956 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lrprk" podUID="ce0f76b3-4ba2-4981-a888-8f659c504f6c" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused"
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.205070 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-j8hd5" event={"ID":"0c3066a4-fecf-4608-be24-c0534bd263cc","Type":"ContainerStarted","Data":"5dc56661cbc173fc868185bd5f2fc9abcb19521a4cc5a08d43e466275e14938f"}
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.208296 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 21:50:09 crc kubenswrapper[4956]: E1211 21:50:09.209340 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:09.709326553 +0000 UTC m=+102.153704703 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.237152 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wnxdh" event={"ID":"2d8a22dd-465c-4327-8d76-782e5d289942","Type":"ContainerStarted","Data":"6cc2b8a391d57bd33d70c31b25c96a1cc633b65be4990fa938113aab9d01c765"}
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.255465 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-nh95j" event={"ID":"0e804c82-ec6a-4d08-bd18-d50942c0d985","Type":"ContainerStarted","Data":"037c44f0ef22c1776bcc8abdad6c01978d63e2b3fb5e04d6cdc5aa6cff073f5b"}
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.309548 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk"
Dec 11 21:50:09 crc kubenswrapper[4956]: E1211 21:50:09.310841 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:09.810828053 +0000 UTC m=+102.255206193 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.366470 4956 generic.go:334] "Generic (PLEG): container finished" podID="fe381376-cd51-4565-be0a-1fd8a77be7ac" containerID="c8acf522ed7eaa16629ca40a45d4576064c0c871bba287b5730eda5beaef5e1d" exitCode=0
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.366663 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" event={"ID":"fe381376-cd51-4565-be0a-1fd8a77be7ac","Type":"ContainerDied","Data":"c8acf522ed7eaa16629ca40a45d4576064c0c871bba287b5730eda5beaef5e1d"}
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.423170 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 21:50:09 crc kubenswrapper[4956]: E1211 21:50:09.428049 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:09.928031077 +0000 UTC m=+102.372409227 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.436307 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-nrqrs" event={"ID":"e5d35c7f-7d08-4c15-a193-867b0b8ea71e","Type":"ContainerStarted","Data":"96fb66badfd67e80947566372664762ef46ca6b78cb2e1aa1c5c51f0de711951"}
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.454679 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-4mlhz" event={"ID":"45c3a183-07ab-4339-92b3-97eac03e9601","Type":"ContainerStarted","Data":"667eb93aac3b168045b73d57ea0ad6abfffd39fff4d5a379de40d629ff93d1a5"}
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.455511 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-4mlhz"
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.525005 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk"
Dec 11 21:50:09 crc kubenswrapper[4956]: E1211 21:50:09.527008 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:10.026997349 +0000 UTC m=+102.471375499 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.528197 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424825-fsrlm" event={"ID":"b39bb8fb-c84a-48cc-aa65-b992c06a090b","Type":"ContainerStarted","Data":"8acea8f51dbfa931156b719a185bdd338f0fef629f00e121efc601b0046318b3"}
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.643806 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 21:50:09 crc kubenswrapper[4956]: E1211 21:50:09.644223 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:10.144203264 +0000 UTC m=+102.588581414 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.652341 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-4mlhz"
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.700437 4956 patch_prober.go:28] interesting pod/router-default-5444994796-fbpg7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 11 21:50:09 crc kubenswrapper[4956]: [-]has-synced failed: reason withheld
Dec 11 21:50:09 crc kubenswrapper[4956]: [+]process-running ok
Dec 11 21:50:09 crc kubenswrapper[4956]: healthz check failed
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.700492 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fbpg7" podUID="1f36b124-c397-4935-82b6-191d83292d1b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.746019 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk"
Dec 11 21:50:09 crc kubenswrapper[4956]: E1211 21:50:09.748889 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:10.248875826 +0000 UTC m=+102.693253976 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.849428 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 21:50:09 crc kubenswrapper[4956]: E1211 21:50:09.850175 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:10.350153679 +0000 UTC m=+102.794531829 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:09 crc kubenswrapper[4956]: I1211 21:50:09.951140 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk"
Dec 11 21:50:09 crc kubenswrapper[4956]: E1211 21:50:09.951493 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:10.451481683 +0000 UTC m=+102.895859833 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.052674 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 21:50:10 crc kubenswrapper[4956]: E1211 21:50:10.053123 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:10.553105326 +0000 UTC m=+102.997483476 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.078168 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29424825-fsrlm" podStartSLOduration=83.078151169 podStartE2EDuration="1m23.078151169s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:10.076069794 +0000 UTC m=+102.520447944" watchObservedRunningTime="2025-12-11 21:50:10.078151169 +0000 UTC m=+102.522529319"
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.154420 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk"
Dec 11 21:50:10 crc kubenswrapper[4956]: E1211 21:50:10.155089 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:10.655075537 +0000 UTC m=+103.099453687 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.162645 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-ztk92" podStartSLOduration=83.162622477 podStartE2EDuration="1m23.162622477s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:10.126594622 +0000 UTC m=+102.570972782" watchObservedRunningTime="2025-12-11 21:50:10.162622477 +0000 UTC m=+102.607000627"
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.164352 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh" podStartSLOduration=83.164341292 podStartE2EDuration="1m23.164341292s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:10.162124134 +0000 UTC m=+102.606502274" watchObservedRunningTime="2025-12-11 21:50:10.164341292 +0000 UTC m=+102.608719442"
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.197980 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wnxdh" podStartSLOduration=83.197966383 podStartE2EDuration="1m23.197966383s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:10.196981196 +0000 UTC m=+102.641359346" watchObservedRunningTime="2025-12-11 21:50:10.197966383 +0000 UTC m=+102.642344533"
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.240709 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-b28md" podStartSLOduration=83.240691195 podStartE2EDuration="1m23.240691195s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:10.238895827 +0000 UTC m=+102.683273977" watchObservedRunningTime="2025-12-11 21:50:10.240691195 +0000 UTC m=+102.685069345"
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.260727 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 21:50:10 crc kubenswrapper[4956]: E1211 21:50:10.261233 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:10.761218968 +0000 UTC m=+103.205597108 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.363483 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk"
Dec 11 21:50:10 crc kubenswrapper[4956]: E1211 21:50:10.363883 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:10.863872218 +0000 UTC m=+103.308250368 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.396106 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lrprk" podStartSLOduration=83.396086172 podStartE2EDuration="1m23.396086172s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:10.377148689 +0000 UTC m=+102.821526839" watchObservedRunningTime="2025-12-11 21:50:10.396086172 +0000 UTC m=+102.840464322"
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.467663 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 21:50:10 crc kubenswrapper[4956]: E1211 21:50:10.467833 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:10.967807051 +0000 UTC m=+103.412185201 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.468031 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk"
Dec 11 21:50:10 crc kubenswrapper[4956]: E1211 21:50:10.468359 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:10.968352915 +0000 UTC m=+103.412731065 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.539500 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-fgzkb" event={"ID":"534554e4-788d-4649-9dfc-ab5fd83d37d9","Type":"ContainerStarted","Data":"191a7f1cea0b90d1c463603d57212329abc69bb66ba205e7c7de2e6f87f56701"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.539548 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-fgzkb" event={"ID":"534554e4-788d-4649-9dfc-ab5fd83d37d9","Type":"ContainerStarted","Data":"377a034eccdfd6da716b719041f549206427cc02119ae542dd4cf5157456f5ab"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.551166 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n77qx" event={"ID":"9222bc8f-bd0e-40f3-be61-07b4f951adae","Type":"ContainerStarted","Data":"3d1f521e39406169d05a83f8e072e46d363d575582559bcfe40fb1cbcbc3a55b"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.567750 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424825-fsrlm" event={"ID":"b39bb8fb-c84a-48cc-aa65-b992c06a090b","Type":"ContainerStarted","Data":"cb24e1ea9f15362e076ca5767d1f9ead26156ff22e051077c0197e828a3da69d"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.568697 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 21:50:10 crc kubenswrapper[4956]: E1211 21:50:10.568803 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:11.068789096 +0000 UTC m=+103.513167246 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.569168 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk"
Dec 11 21:50:10 crc kubenswrapper[4956]: E1211 21:50:10.570387 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:11.070377708 +0000 UTC m=+103.514755858 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.580454 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7fvb4" event={"ID":"2d03e7cc-e66c-4be7-a167-6e8619011299","Type":"ContainerStarted","Data":"9adfe06c7acf6f28a74a7badf4dbc799e70e34915a20e1605db389a5b6659a84"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.580505 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7fvb4" event={"ID":"2d03e7cc-e66c-4be7-a167-6e8619011299","Type":"ContainerStarted","Data":"92cc1e481bad5d87f23ad2b27f3beba8e61cbbbe343aebfc7a180d29756e3e43"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.588912 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-nrqrs" event={"ID":"e5d35c7f-7d08-4c15-a193-867b0b8ea71e","Type":"ContainerStarted","Data":"63fe77fd67d9f234a5841ee263aadcbe78a924fa02997261e1ce041bfc8cb4fd"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.605268 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-nh95j" event={"ID":"0e804c82-ec6a-4d08-bd18-d50942c0d985","Type":"ContainerStarted","Data":"c86a1107c3f42842feb0fcfa80913184cd276ec7e845f814a9752bb54ab96a9f"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.620111 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-4lmpq" event={"ID":"2ca85300-63e6-412c-917b-df0c8696dfda","Type":"ContainerStarted","Data":"6691789d7877b7e394f0eca48fd7aa1ff6a53a3c355ab2781d71f120786628bc"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.620164 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-4lmpq" event={"ID":"2ca85300-63e6-412c-917b-df0c8696dfda","Type":"ContainerStarted","Data":"fa1cfbda6926ee7942750a130498897c61465946ad8595e2e5936063fb5b09d8"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.629217 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" event={"ID":"6dae87cb-e091-408e-9b9d-4d45e7797fc5","Type":"ContainerStarted","Data":"ffc9556e97d258b01bfb3b9cd94e82f237e03be95ffb33d792ec60994de8aec4"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.630019 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl"
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.634097 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-56fs7" event={"ID":"cf15492b-35b0-42ad-a13d-540ccaa7dc23","Type":"ContainerStarted","Data":"1cf196c4a6242d456d402e0d0b9675cee0313e38b8553be6bd0d84df73351916"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.634136 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-56fs7" event={"ID":"cf15492b-35b0-42ad-a13d-540ccaa7dc23","Type":"ContainerStarted","Data":"833f60c291a2ffa8e999c39d49e8b898683e277a6052f48c27f8197db8555c56"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.637821 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-6nc5z" event={"ID":"c53073d9-6cbf-4e29-95cb-88254007d7d4","Type":"ContainerStarted","Data":"de32ac7a2ec6a0926c4d4935975a47cc416c0e0e90f49e9dcfdc897fb2dd2c45"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.638289 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-6nc5z"
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.640626 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6f6vb" event={"ID":"ef684a3a-493f-4116-ae57-a0e732765982","Type":"ContainerStarted","Data":"8c4eceec0890b24c327a102d87c2f3af26efe9a9325974a8c6e3bd6d498b006b"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.648025 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-wnxdh" event={"ID":"2d8a22dd-465c-4327-8d76-782e5d289942","Type":"ContainerStarted","Data":"082666cf5af4539b05d94a5035a8790cdbb98e8403cf21a06c5ab1ca14116514"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.651468 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6pn79" event={"ID":"7d63c8a9-8aed-4a38-a43c-ea5c95e6a59f","Type":"ContainerStarted","Data":"bce6205160469f37da1bed46c7e40e8fc719b339c306d4488b1f5ccf53a6e95c"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.657758 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8r689" event={"ID":"20336def-d6ab-4203-8957-629a61fec0a7","Type":"ContainerStarted","Data":"bbcf44de96144f9fdc2dc8a921fa10950f076a08363700326468a4fd45880e5e"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.657808 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8r689" event={"ID":"20336def-d6ab-4203-8957-629a61fec0a7","Type":"ContainerStarted","Data":"64aa4a3b5171deecb525a88e7d5fb17416962683bda3801589fe8bb00011483c"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.658323 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8r689"
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.660648 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-ztk92" event={"ID":"3d0fc4e8-1c1f-4f4a-8dd2-eaf0c3f7b294","Type":"ContainerStarted","Data":"8e4ea1abb093654b2dc5c75efeccb9e6f0bff9fbf60b250c0f3009c8787af6b2"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.661432 4956 patch_prober.go:28] interesting pod/downloads-7954f5f757-ztk92 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body=
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.661465 4956 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-ztk92" podUID="3d0fc4e8-1c1f-4f4a-8dd2-eaf0c3f7b294" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused"
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.665752 4956 generic.go:334] "Generic (PLEG): container finished" podID="62552ab3-7cb2-4f75-8f3a-75d264a50f66" containerID="ef4146eb4d47af6c12f4475382406b8cbeb3a37465a72d63ef985a778225686f" exitCode=0
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.665814 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vzvmj" event={"ID":"62552ab3-7cb2-4f75-8f3a-75d264a50f66","Type":"ContainerDied","Data":"ef4146eb4d47af6c12f4475382406b8cbeb3a37465a72d63ef985a778225686f"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.671052 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 21:50:10 crc kubenswrapper[4956]: E1211 21:50:10.671330 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:11.171297222 +0000 UTC m=+103.615675382 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.671340 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" event={"ID":"fe381376-cd51-4565-be0a-1fd8a77be7ac","Type":"ContainerStarted","Data":"a9b6301de5ce00417aea10a8d98c9e70842efc067d931265e11c93e7e0e34676"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.671922 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk"
Dec 11 21:50:10 crc kubenswrapper[4956]: E1211 21:50:10.673031 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:11.173016557 +0000 UTC m=+103.617394787 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.689299 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-7phqq" event={"ID":"e2f7854d-11cc-4a65-977c-8a1570116842","Type":"ContainerStarted","Data":"9d3a1838f4efacab7a69b0b6ac3b8edfb8b88b3e5a4bd6f198c01d76366ebe73"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.695758 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" podStartSLOduration=83.69574113 podStartE2EDuration="1m23.69574113s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:10.688602931 +0000 UTC m=+103.132981081" watchObservedRunningTime="2025-12-11 21:50:10.69574113 +0000 UTC m=+103.140119280"
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.701890 4956 patch_prober.go:28] interesting pod/router-default-5444994796-fbpg7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 11 21:50:10 crc kubenswrapper[4956]: [-]has-synced failed: reason withheld
Dec 11 21:50:10 crc kubenswrapper[4956]: [+]process-running ok
Dec 11 21:50:10 crc kubenswrapper[4956]: healthz check failed
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.701961 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fbpg7" podUID="1f36b124-c397-4935-82b6-191d83292d1b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.721443 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-z6m7g" event={"ID":"1cd84235-0b8b-43a0-8d10-6324b5759eac","Type":"ContainerStarted","Data":"ef8447b363867c3497041cd9d8cf2745dc21fd3846058d45f7721d0bc6d55144"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.722461 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-z6m7g"
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.730796 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" event={"ID":"3b1e309e-6542-43b9-95cc-3197be39a203","Type":"ContainerStarted","Data":"e0dd16700e63e2187aa1fa1486da7671a7a6a25ba9c4340591a228c3b204c0b8"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.734087 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-nrqrs" podStartSLOduration=83.734064454 podStartE2EDuration="1m23.734064454s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:10.72972428 +0000 UTC m=+103.174102420" watchObservedRunningTime="2025-12-11 21:50:10.734064454 +0000 UTC m=+103.178442604"
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.738468 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-dkst5" event={"ID":"99b3c2ef-0c86-427c-9c97-f4a9221b69b1","Type":"ContainerStarted","Data":"18be1ed8692b5c2b88942c0e83469c09eff06aa8274d597992bd0e60601c88ae"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.742927 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-jkrgw" event={"ID":"998b11ed-322d-49b3-9a3a-79474037d6ea","Type":"ContainerStarted","Data":"8a55f768d98ae556253c7ef40d2385612d3a6634d8bf7e27e9b48ef7805fbddc"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.749057 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-hsjmq" event={"ID":"78d9268c-ae46-4117-8674-2a7d107831bd","Type":"ContainerStarted","Data":"16e25b275afc9ee5c6b95319dccf657a0011dd5b5d5c2a6764b545ee9179778f"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.749106 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-hsjmq" event={"ID":"78d9268c-ae46-4117-8674-2a7d107831bd","Type":"ContainerStarted","Data":"da5d0331e0f5411aad803e5820089dfc79efc1fe5d763d5d63d6cce863ebed14"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.756460 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2mg62" event={"ID":"74c4dba3-53de-449c-9360-9ec5d8a00b1d","Type":"ContainerStarted","Data":"e4f8c8da5532f5fe731373de78b3101978e75f6b023ec265abaf4bc0dc8b67b3"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.756562 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2mg62" event={"ID":"74c4dba3-53de-449c-9360-9ec5d8a00b1d","Type":"ContainerStarted","Data":"aaf624b8289478f66723f38658741af17bc53f9c40936fbd95d927a6d4dd26ba"}
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.758213 4956 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-d8mkh container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/healthz\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body=
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.758266 4956 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-d8mkh" podUID="62802da2-70ad-46d2-bc51-b9bf3e0b6086" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.19:8080/healthz\": dial tcp 10.217.0.19:8080: connect: connection refused"
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.773221 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 21:50:10 crc kubenswrapper[4956]: E1211 21:50:10.783971 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:11.283951246 +0000 UTC m=+103.728329396 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.785726 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-lrprk"
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.825029 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-d8mkh" podStartSLOduration=83.825011684 podStartE2EDuration="1m23.825011684s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:10.771061204 +0000 UTC m=+103.215439354" watchObservedRunningTime="2025-12-11 21:50:10.825011684 +0000 UTC m=+103.269389844"
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.879571 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh"
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.890915 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql"
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.892268 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk"
Dec 11 21:50:10 crc kubenswrapper[4956]: E1211 21:50:10.893810 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:11.393797686 +0000 UTC m=+103.838175836 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.908836 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-j8hd5" podStartSLOduration=83.908821144 podStartE2EDuration="1m23.908821144s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:10.863967446 +0000 UTC m=+103.308345596" watchObservedRunningTime="2025-12-11 21:50:10.908821144 +0000 UTC m=+103.353199294"
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.968992 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-4mlhz" podStartSLOduration=83.968978438 podStartE2EDuration="1m23.968978438s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:10.967203421 +0000 UTC m=+103.411581571" watchObservedRunningTime="2025-12-11 21:50:10.968978438 +0000 UTC m=+103.413356578"
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.969968 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6f6vb" podStartSLOduration=83.969960754 podStartE2EDuration="1m23.969960754s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:10.914001902 +0000 UTC m=+103.358380052" watchObservedRunningTime="2025-12-11 21:50:10.969960754 +0000 UTC m=+103.414338904"
Dec 11 21:50:10 crc kubenswrapper[4956]: I1211 21:50:10.995728 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 21:50:10 crc kubenswrapper[4956]: E1211 21:50:10.996705 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:11.496689892 +0000 UTC m=+103.941068042 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.005894 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-56fs7" podStartSLOduration=84.005876045 podStartE2EDuration="1m24.005876045s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:11.005188407 +0000 UTC m=+103.449566557" watchObservedRunningTime="2025-12-11 21:50:11.005876045 +0000 UTC m=+103.450254195"
Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.046899 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8r689" podStartSLOduration=84.04687401 podStartE2EDuration="1m24.04687401s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:11.046389578 +0000 UTC m=+103.490767728" watchObservedRunningTime="2025-12-11 21:50:11.04687401 +0000 UTC m=+103.491252160"
Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.096871 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk"
Dec 11 21:50:11 crc kubenswrapper[4956]: E1211 21:50:11.097225 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:11.597209194 +0000 UTC m=+104.041587344 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.119197 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2mg62" podStartSLOduration=84.119156706 podStartE2EDuration="1m24.119156706s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:11.084894329 +0000 UTC m=+103.529272479" watchObservedRunningTime="2025-12-11 21:50:11.119156706 +0000 UTC m=+103.563534856"
Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.184727 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-6nc5z" podStartSLOduration=11.184693342 podStartE2EDuration="11.184693342s" podCreationTimestamp="2025-12-11 21:50:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:11.121069116 +0000 UTC m=+103.565447276" watchObservedRunningTime="2025-12-11 21:50:11.184693342 +0000 UTC m=+103.629071492"
Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.198228 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 21:50:11 crc kubenswrapper[4956]: E1211 21:50:11.198482 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:11.698457827 +0000 UTC m=+104.142835977 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.198563 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk"
Dec 11 21:50:11 crc kubenswrapper[4956]: E1211 21:50:11.198839 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:11.698832196 +0000 UTC m=+104.143210346 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.205239 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n77qx" podStartSLOduration=84.205220506 podStartE2EDuration="1m24.205220506s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:11.20310808 +0000 UTC m=+103.647486230" watchObservedRunningTime="2025-12-11 21:50:11.205220506 +0000 UTC m=+103.649598656"
Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.223664 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-jkrgw" podStartSLOduration=84.223647304 podStartE2EDuration="1m24.223647304s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:11.221194989 +0000 UTC m=+103.665573139" watchObservedRunningTime="2025-12-11 21:50:11.223647304 +0000 UTC m=+103.668025454"
Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.236694 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-l5286"]
Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.237667 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l5286"
Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.241346 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.268903 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l5286"]
Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.285961 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6pn79" podStartSLOduration=84.285944924 podStartE2EDuration="1m24.285944924s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:11.285505192 +0000 UTC m=+103.729883332" watchObservedRunningTime="2025-12-11 21:50:11.285944924 +0000 UTC m=+103.730323074"
Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.302264 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 11 21:50:11 crc kubenswrapper[4956]: E1211 21:50:11.302491 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:11.802452061 +0000 UTC m=+104.246830211 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.302694 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gscp5\" (UniqueName: \"kubernetes.io/projected/3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336-kube-api-access-gscp5\") pod \"community-operators-l5286\" (UID: \"3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336\") " pod="openshift-marketplace/community-operators-l5286" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.302843 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336-catalog-content\") pod \"community-operators-l5286\" (UID: \"3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336\") " pod="openshift-marketplace/community-operators-l5286" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.302967 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336-utilities\") pod \"community-operators-l5286\" (UID: \"3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336\") " pod="openshift-marketplace/community-operators-l5286" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.303074 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:11 crc kubenswrapper[4956]: E1211 21:50:11.303437 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:11.803424837 +0000 UTC m=+104.247802997 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.343092 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-4lmpq" podStartSLOduration=84.343072807 podStartE2EDuration="1m24.343072807s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:11.341561117 +0000 UTC m=+103.785939277" watchObservedRunningTime="2025-12-11 21:50:11.343072807 +0000 UTC m=+103.787450957" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.404100 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:11 crc kubenswrapper[4956]: E1211 21:50:11.404246 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:11.904224428 +0000 UTC m=+104.348602578 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.404353 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.404435 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gscp5\" (UniqueName: \"kubernetes.io/projected/3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336-kube-api-access-gscp5\") pod \"community-operators-l5286\" (UID: \"3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336\") " pod="openshift-marketplace/community-operators-l5286" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.404472 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336-catalog-content\") pod \"community-operators-l5286\" (UID: \"3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336\") " pod="openshift-marketplace/community-operators-l5286" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.404531 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336-utilities\") pod \"community-operators-l5286\" (UID: \"3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336\") " pod="openshift-marketplace/community-operators-l5286" Dec 11 21:50:11 crc kubenswrapper[4956]: E1211 21:50:11.404928 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:11.904918756 +0000 UTC m=+104.349296906 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.405403 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" podStartSLOduration=85.405381839 podStartE2EDuration="1m25.405381839s" podCreationTimestamp="2025-12-11 21:48:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:11.399457731 +0000 UTC m=+103.843835891" watchObservedRunningTime="2025-12-11 21:50:11.405381839 +0000 UTC m=+103.849759989" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.405680 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336-utilities\") pod \"community-operators-l5286\" (UID: \"3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336\") " pod="openshift-marketplace/community-operators-l5286" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.405441 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336-catalog-content\") pod \"community-operators-l5286\" (UID: \"3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336\") " pod="openshift-marketplace/community-operators-l5286" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.437906 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bn9xr"] Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.439019 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bn9xr" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.441845 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.492828 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bn9xr"] Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.497602 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gscp5\" (UniqueName: \"kubernetes.io/projected/3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336-kube-api-access-gscp5\") pod \"community-operators-l5286\" (UID: \"3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336\") " pod="openshift-marketplace/community-operators-l5286" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.505228 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:11 crc kubenswrapper[4956]: E1211 21:50:11.508940 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:12.00891343 +0000 UTC m=+104.453291580 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.524383 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvrgh\" (UniqueName: \"kubernetes.io/projected/aa022a30-6487-45c0-82b7-336a05167918-kube-api-access-jvrgh\") pod \"certified-operators-bn9xr\" (UID: \"aa022a30-6487-45c0-82b7-336a05167918\") " pod="openshift-marketplace/certified-operators-bn9xr" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.524551 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.524682 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa022a30-6487-45c0-82b7-336a05167918-catalog-content\") pod \"certified-operators-bn9xr\" (UID: \"aa022a30-6487-45c0-82b7-336a05167918\") " pod="openshift-marketplace/certified-operators-bn9xr" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.524820 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa022a30-6487-45c0-82b7-336a05167918-utilities\") pod \"certified-operators-bn9xr\" (UID: \"aa022a30-6487-45c0-82b7-336a05167918\") " pod="openshift-marketplace/certified-operators-bn9xr" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.517498 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-fgzkb" podStartSLOduration=84.517479018 podStartE2EDuration="1m24.517479018s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:11.516700417 +0000 UTC m=+103.961078567" watchObservedRunningTime="2025-12-11 21:50:11.517479018 +0000 UTC m=+103.961857168" Dec 11 21:50:11 crc kubenswrapper[4956]: E1211 21:50:11.525810 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:12.025798028 +0000 UTC m=+104.470176178 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.564059 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l5286" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.566166 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-dkst5" podStartSLOduration=84.566146186 podStartE2EDuration="1m24.566146186s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:11.564160824 +0000 UTC m=+104.008538974" watchObservedRunningTime="2025-12-11 21:50:11.566146186 +0000 UTC m=+104.010524336" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.606839 4956 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.608559 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-hsjmq" podStartSLOduration=84.60854513 podStartE2EDuration="1m24.60854513s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:11.606137916 +0000 UTC m=+104.050516066" watchObservedRunningTime="2025-12-11 21:50:11.60854513 +0000 UTC m=+104.052923280" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.631131 4956 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-hq2jl container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.15:6443/healthz\": net/http: request canceled 
while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.631188 4956 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" podUID="6dae87cb-e091-408e-9b9d-4d45e7797fc5" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.15:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.631408 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hmhrj"] Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.632300 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hmhrj" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.633022 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:11 crc kubenswrapper[4956]: E1211 21:50:11.633480 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:12.133444609 +0000 UTC m=+104.577822759 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.634178 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa022a30-6487-45c0-82b7-336a05167918-catalog-content\") pod \"certified-operators-bn9xr\" (UID: \"aa022a30-6487-45c0-82b7-336a05167918\") " pod="openshift-marketplace/certified-operators-bn9xr" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.634285 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa022a30-6487-45c0-82b7-336a05167918-utilities\") pod \"certified-operators-bn9xr\" (UID: \"aa022a30-6487-45c0-82b7-336a05167918\") " pod="openshift-marketplace/certified-operators-bn9xr" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.634385 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvrgh\" (UniqueName: \"kubernetes.io/projected/aa022a30-6487-45c0-82b7-336a05167918-kube-api-access-jvrgh\") pod \"certified-operators-bn9xr\" (UID: \"aa022a30-6487-45c0-82b7-336a05167918\") " pod="openshift-marketplace/certified-operators-bn9xr" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.634504 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:11 crc kubenswrapper[4956]: E1211 21:50:11.634847 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:12.134836976 +0000 UTC m=+104.579215126 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.635345 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa022a30-6487-45c0-82b7-336a05167918-catalog-content\") pod \"certified-operators-bn9xr\" (UID: \"aa022a30-6487-45c0-82b7-336a05167918\") " pod="openshift-marketplace/certified-operators-bn9xr" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.635672 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa022a30-6487-45c0-82b7-336a05167918-utilities\") pod \"certified-operators-bn9xr\" (UID: \"aa022a30-6487-45c0-82b7-336a05167918\") " pod="openshift-marketplace/certified-operators-bn9xr" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.642579 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-7phqq" podStartSLOduration=11.642565201 podStartE2EDuration="11.642565201s" podCreationTimestamp="2025-12-11 21:50:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:11.640568029 +0000 UTC m=+104.084946179" watchObservedRunningTime="2025-12-11 21:50:11.642565201 +0000 UTC m=+104.086943351" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.698852 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvrgh\" (UniqueName: \"kubernetes.io/projected/aa022a30-6487-45c0-82b7-336a05167918-kube-api-access-jvrgh\") pod \"certified-operators-bn9xr\" (UID: \"aa022a30-6487-45c0-82b7-336a05167918\") " pod="openshift-marketplace/certified-operators-bn9xr" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.701663 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hmhrj"] Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.723536 4956 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-z6m7g container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.17:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.723621 4956 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-z6m7g" podUID="1cd84235-0b8b-43a0-8d10-6324b5759eac" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.17:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.732948 4956 patch_prober.go:28] interesting pod/router-default-5444994796-fbpg7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 21:50:11 crc kubenswrapper[4956]: [-]has-synced failed: reason withheld Dec 11 21:50:11 crc kubenswrapper[4956]: [+]process-running ok Dec 11 21:50:11 crc kubenswrapper[4956]: healthz check failed Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.733032 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fbpg7" podUID="1f36b124-c397-4935-82b6-191d83292d1b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.745452 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.745862 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0e7dba7-84b8-4ec4-9def-5eb44ac9a523-utilities\") pod \"community-operators-hmhrj\" (UID: \"e0e7dba7-84b8-4ec4-9def-5eb44ac9a523\") " pod="openshift-marketplace/community-operators-hmhrj" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.745927 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0e7dba7-84b8-4ec4-9def-5eb44ac9a523-catalog-content\") pod \"community-operators-hmhrj\" (UID: \"e0e7dba7-84b8-4ec4-9def-5eb44ac9a523\") " pod="openshift-marketplace/community-operators-hmhrj" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.745961 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cb4ff\" (UniqueName: \"kubernetes.io/projected/e0e7dba7-84b8-4ec4-9def-5eb44ac9a523-kube-api-access-cb4ff\") pod \"community-operators-hmhrj\" (UID: \"e0e7dba7-84b8-4ec4-9def-5eb44ac9a523\") " pod="openshift-marketplace/community-operators-hmhrj" Dec 11 21:50:11 crc kubenswrapper[4956]: E1211 21:50:11.746099 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:12.246078473 +0000 UTC m=+104.690456623 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.759757 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" podStartSLOduration=85.759738315 podStartE2EDuration="1m25.759738315s" podCreationTimestamp="2025-12-11 21:48:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:11.712154485 +0000 UTC m=+104.156532625" watchObservedRunningTime="2025-12-11 21:50:11.759738315 +0000 UTC m=+104.204116465" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.781482 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7fvb4" podStartSLOduration=84.7814598 podStartE2EDuration="1m24.7814598s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:11.770670694 +0000 UTC m=+104.215048854" watchObservedRunningTime="2025-12-11 21:50:11.7814598 +0000 UTC m=+104.225837950" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.817382 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bn9xr" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.847056 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0e7dba7-84b8-4ec4-9def-5eb44ac9a523-utilities\") pod \"community-operators-hmhrj\" (UID: \"e0e7dba7-84b8-4ec4-9def-5eb44ac9a523\") " pod="openshift-marketplace/community-operators-hmhrj" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.847117 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.847156 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0e7dba7-84b8-4ec4-9def-5eb44ac9a523-catalog-content\") pod \"community-operators-hmhrj\" (UID: \"e0e7dba7-84b8-4ec4-9def-5eb44ac9a523\") " pod="openshift-marketplace/community-operators-hmhrj" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.847194 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cb4ff\" (UniqueName: \"kubernetes.io/projected/e0e7dba7-84b8-4ec4-9def-5eb44ac9a523-kube-api-access-cb4ff\") pod \"community-operators-hmhrj\" (UID: \"e0e7dba7-84b8-4ec4-9def-5eb44ac9a523\") " pod="openshift-marketplace/community-operators-hmhrj" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.847984 4956 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0e7dba7-84b8-4ec4-9def-5eb44ac9a523-utilities\") pod \"community-operators-hmhrj\" (UID: \"e0e7dba7-84b8-4ec4-9def-5eb44ac9a523\") " pod="openshift-marketplace/community-operators-hmhrj" Dec 11 21:50:11 crc kubenswrapper[4956]: E1211 21:50:11.848246 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:12.34823457 +0000 UTC m=+104.792612720 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.848515 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0e7dba7-84b8-4ec4-9def-5eb44ac9a523-catalog-content\") pod \"community-operators-hmhrj\" (UID: \"e0e7dba7-84b8-4ec4-9def-5eb44ac9a523\") " pod="openshift-marketplace/community-operators-hmhrj" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.865788 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" event={"ID":"3b1e309e-6542-43b9-95cc-3197be39a203","Type":"ContainerStarted","Data":"694d547a918014a0e4f02493a63cce607c792417192290ae0e2861b357772320"} Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.870000 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-nh95j" event={"ID":"0e804c82-ec6a-4d08-bd18-d50942c0d985","Type":"ContainerStarted","Data":"cd195de69df5437d2ba5354585799776e876fc4c2e1b65527edb977af6a5d166"} Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.870043 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-nh95j" event={"ID":"0e804c82-ec6a-4d08-bd18-d50942c0d985","Type":"ContainerStarted","Data":"2e7669b35c34da416e2d46d30880d5627a9cee88581280270b8eaba2fd355f62"} Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.876972 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vzvmj" event={"ID":"62552ab3-7cb2-4f75-8f3a-75d264a50f66","Type":"ContainerStarted","Data":"5ad42e10b6e552ffa0bff735aab682aae35b97bba125fe5bd042810c711d1228"} Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.877007 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vzvmj" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.879062 4956 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-d8mkh container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/healthz\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.879123 4956 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-d8mkh" 
podUID="62802da2-70ad-46d2-bc51-b9bf3e0b6086" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.19:8080/healthz\": dial tcp 10.217.0.19:8080: connect: connection refused" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.880368 4956 patch_prober.go:28] interesting pod/downloads-7954f5f757-ztk92 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body= Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.880408 4956 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-ztk92" podUID="3d0fc4e8-1c1f-4f4a-8dd2-eaf0c3f7b294" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.882051 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jkbh9"] Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.884256 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jkbh9" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.893544 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-z6m7g" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.949350 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.950553 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de7b66f2-e73f-46ad-80da-a2c834e099c6-utilities\") pod \"certified-operators-jkbh9\" (UID: \"de7b66f2-e73f-46ad-80da-a2c834e099c6\") " pod="openshift-marketplace/certified-operators-jkbh9" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.950608 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de7b66f2-e73f-46ad-80da-a2c834e099c6-catalog-content\") pod \"certified-operators-jkbh9\" (UID: \"de7b66f2-e73f-46ad-80da-a2c834e099c6\") " pod="openshift-marketplace/certified-operators-jkbh9" Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.950626 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrjcl\" (UniqueName: \"kubernetes.io/projected/de7b66f2-e73f-46ad-80da-a2c834e099c6-kube-api-access-vrjcl\") pod \"certified-operators-jkbh9\" (UID: \"de7b66f2-e73f-46ad-80da-a2c834e099c6\") " pod="openshift-marketplace/certified-operators-jkbh9" Dec 11 21:50:11 crc kubenswrapper[4956]: E1211 21:50:11.951793 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:12.451759892 +0000 UTC m=+104.896138042 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:11 crc kubenswrapper[4956]: I1211 21:50:11.997513 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-z6m7g" podStartSLOduration=84.997490933 podStartE2EDuration="1m24.997490933s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:11.952301456 +0000 UTC m=+104.396679606" watchObservedRunningTime="2025-12-11 21:50:11.997490933 +0000 UTC m=+104.441869103" Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.013609 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jkbh9"] Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.026181 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cb4ff\" (UniqueName: \"kubernetes.io/projected/e0e7dba7-84b8-4ec4-9def-5eb44ac9a523-kube-api-access-cb4ff\") pod \"community-operators-hmhrj\" (UID: \"e0e7dba7-84b8-4ec4-9def-5eb44ac9a523\") " pod="openshift-marketplace/community-operators-hmhrj" Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.053371 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de7b66f2-e73f-46ad-80da-a2c834e099c6-utilities\") pod \"certified-operators-jkbh9\" (UID: \"de7b66f2-e73f-46ad-80da-a2c834e099c6\") " pod="openshift-marketplace/certified-operators-jkbh9" Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.053416 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrjcl\" (UniqueName: \"kubernetes.io/projected/de7b66f2-e73f-46ad-80da-a2c834e099c6-kube-api-access-vrjcl\") pod \"certified-operators-jkbh9\" (UID: \"de7b66f2-e73f-46ad-80da-a2c834e099c6\") " pod="openshift-marketplace/certified-operators-jkbh9" Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.053436 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de7b66f2-e73f-46ad-80da-a2c834e099c6-catalog-content\") pod \"certified-operators-jkbh9\" (UID: \"de7b66f2-e73f-46ad-80da-a2c834e099c6\") " pod="openshift-marketplace/certified-operators-jkbh9" Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.053500 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:12 crc kubenswrapper[4956]: E1211 21:50:12.053830 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-11 21:50:12.553814836 +0000 UTC m=+104.998192986 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.054335 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de7b66f2-e73f-46ad-80da-a2c834e099c6-utilities\") pod \"certified-operators-jkbh9\" (UID: \"de7b66f2-e73f-46ad-80da-a2c834e099c6\") " pod="openshift-marketplace/certified-operators-jkbh9" Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.076389 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hmhrj" Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.077982 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de7b66f2-e73f-46ad-80da-a2c834e099c6-catalog-content\") pod \"certified-operators-jkbh9\" (UID: \"de7b66f2-e73f-46ad-80da-a2c834e099c6\") " pod="openshift-marketplace/certified-operators-jkbh9" Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.135430 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrjcl\" (UniqueName: \"kubernetes.io/projected/de7b66f2-e73f-46ad-80da-a2c834e099c6-kube-api-access-vrjcl\") pod \"certified-operators-jkbh9\" (UID: \"de7b66f2-e73f-46ad-80da-a2c834e099c6\") " pod="openshift-marketplace/certified-operators-jkbh9" Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.164103 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:12 crc kubenswrapper[4956]: E1211 21:50:12.164204 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:12.664190459 +0000 UTC m=+105.108568609 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.164354 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:12 crc kubenswrapper[4956]: E1211 21:50:12.166600 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 21:50:12.666590973 +0000 UTC m=+105.110969123 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-hnrtk" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.178448 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vzvmj" podStartSLOduration=85.178433407 podStartE2EDuration="1m25.178433407s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:12.118106469 +0000 UTC m=+104.562484629" watchObservedRunningTime="2025-12-11 21:50:12.178433407 +0000 UTC m=+104.622811557" Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.263094 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jkbh9" Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.265330 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:12 crc kubenswrapper[4956]: E1211 21:50:12.265629 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 21:50:12.765613626 +0000 UTC m=+105.209991776 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.291506 4956 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-11T21:50:11.607068521Z","Handler":null,"Name":""} Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.327349 4956 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.327387 4956 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.368091 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.395438 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.397170 4956 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.397196 4956 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.481522 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-hnrtk\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.531288 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l5286"] Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.570631 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.667659 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.694264 4956 patch_prober.go:28] interesting pod/router-default-5444994796-fbpg7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 21:50:12 crc kubenswrapper[4956]: [-]has-synced failed: reason withheld Dec 11 21:50:12 crc kubenswrapper[4956]: [+]process-running ok Dec 11 21:50:12 crc kubenswrapper[4956]: healthz check failed Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.694321 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fbpg7" podUID="1f36b124-c397-4935-82b6-191d83292d1b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.721171 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.751212 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bn9xr"] Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.929794 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hmhrj"] Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.946061 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bn9xr" event={"ID":"aa022a30-6487-45c0-82b7-336a05167918","Type":"ContainerStarted","Data":"836e4b5b40fe2e4fed5e39901112ad5741ded25c541bf117cde82dbcf6b44348"} Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.962122 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l5286" event={"ID":"3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336","Type":"ContainerStarted","Data":"28a2de006491ea97b6fe4e5988d1c16a3ce0116eccc30640e3bdc8631d78154d"} Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.962184 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l5286" event={"ID":"3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336","Type":"ContainerStarted","Data":"4e62638da06d5c3847b0da986597866cad93e3878130864ec6ee21fdcdda881d"} Dec 11 21:50:12 crc kubenswrapper[4956]: I1211 21:50:12.984118 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-nh95j" event={"ID":"0e804c82-ec6a-4d08-bd18-d50942c0d985","Type":"ContainerStarted","Data":"01696fd15eddaabd15f4b1e4913c3b0a29dbddac3ecff432d99cf22d49a908bf"} Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.051323 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-nh95j" podStartSLOduration=13.051307299 podStartE2EDuration="13.051307299s" podCreationTimestamp="2025-12-11 21:50:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:13.04908744 +0000 UTC m=+105.493465590" watchObservedRunningTime="2025-12-11 21:50:13.051307299 +0000 UTC m=+105.495685449" Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.147515 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jkbh9"] Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.326451 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-hnrtk"] Dec 11 21:50:13 crc kubenswrapper[4956]: W1211 21:50:13.332169 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6bcc8182_6e42_4b00_a247_803f1b9bd1d3.slice/crio-d2e8266f2d17334e3c52ad78995eb56b318f07dc15403ae94e34ee9bdf6c8ed8 WatchSource:0}: Error finding container d2e8266f2d17334e3c52ad78995eb56b318f07dc15403ae94e34ee9bdf6c8ed8: Status 404 returned error can't find the container with id d2e8266f2d17334e3c52ad78995eb56b318f07dc15403ae94e34ee9bdf6c8ed8 Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.413702 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.413747 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw" Dec 
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.418176 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5khrp"]
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.419157 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5khrp"
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.421111 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.424277 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw"
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.441654 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5khrp"]
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.495886 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmp9j\" (UniqueName: \"kubernetes.io/projected/48520909-a6cd-4ec4-a6db-35a778505823-kube-api-access-vmp9j\") pod \"redhat-marketplace-5khrp\" (UID: \"48520909-a6cd-4ec4-a6db-35a778505823\") " pod="openshift-marketplace/redhat-marketplace-5khrp"
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.495982 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48520909-a6cd-4ec4-a6db-35a778505823-catalog-content\") pod \"redhat-marketplace-5khrp\" (UID: \"48520909-a6cd-4ec4-a6db-35a778505823\") " pod="openshift-marketplace/redhat-marketplace-5khrp"
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.496014 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48520909-a6cd-4ec4-a6db-35a778505823-utilities\") pod \"redhat-marketplace-5khrp\" (UID: \"48520909-a6cd-4ec4-a6db-35a778505823\") " pod="openshift-marketplace/redhat-marketplace-5khrp"
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.596868 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmp9j\" (UniqueName: \"kubernetes.io/projected/48520909-a6cd-4ec4-a6db-35a778505823-kube-api-access-vmp9j\") pod \"redhat-marketplace-5khrp\" (UID: \"48520909-a6cd-4ec4-a6db-35a778505823\") " pod="openshift-marketplace/redhat-marketplace-5khrp"
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.596992 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48520909-a6cd-4ec4-a6db-35a778505823-catalog-content\") pod \"redhat-marketplace-5khrp\" (UID: \"48520909-a6cd-4ec4-a6db-35a778505823\") " pod="openshift-marketplace/redhat-marketplace-5khrp"
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.597013 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48520909-a6cd-4ec4-a6db-35a778505823-utilities\") pod \"redhat-marketplace-5khrp\" (UID: \"48520909-a6cd-4ec4-a6db-35a778505823\") " pod="openshift-marketplace/redhat-marketplace-5khrp"
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.597431 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48520909-a6cd-4ec4-a6db-35a778505823-catalog-content\") pod \"redhat-marketplace-5khrp\" (UID: \"48520909-a6cd-4ec4-a6db-35a778505823\") " pod="openshift-marketplace/redhat-marketplace-5khrp"
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.597611 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48520909-a6cd-4ec4-a6db-35a778505823-utilities\") pod \"redhat-marketplace-5khrp\" (UID: \"48520909-a6cd-4ec4-a6db-35a778505823\") " pod="openshift-marketplace/redhat-marketplace-5khrp"
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.618810 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmp9j\" (UniqueName: \"kubernetes.io/projected/48520909-a6cd-4ec4-a6db-35a778505823-kube-api-access-vmp9j\") pod \"redhat-marketplace-5khrp\" (UID: \"48520909-a6cd-4ec4-a6db-35a778505823\") " pod="openshift-marketplace/redhat-marketplace-5khrp"
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.663114 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-f4wrs"
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.663175 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-f4wrs"
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.705814 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-fbpg7"
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.712793 4956 patch_prober.go:28] interesting pod/router-default-5444994796-fbpg7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 11 21:50:13 crc kubenswrapper[4956]: [-]has-synced failed: reason withheld
Dec 11 21:50:13 crc kubenswrapper[4956]: [+]process-running ok
Dec 11 21:50:13 crc kubenswrapper[4956]: healthz check failed
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.712875 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fbpg7" podUID="1f36b124-c397-4935-82b6-191d83292d1b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.762704 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-d8mkh"
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.810972 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5khrp"
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.821292 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rdx2c"]
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.822412 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rdx2c"
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.840926 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rdx2c"]
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.890673 4956 patch_prober.go:28] interesting pod/downloads-7954f5f757-ztk92 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body=
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.890733 4956 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-ztk92" podUID="3d0fc4e8-1c1f-4f4a-8dd2-eaf0c3f7b294" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused"
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.891688 4956 patch_prober.go:28] interesting pod/downloads-7954f5f757-ztk92 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body=
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.891712 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-ztk92" podUID="3d0fc4e8-1c1f-4f4a-8dd2-eaf0c3f7b294" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused"
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.908453 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/303d970a-e04c-4ba1-a0da-dd21716371e9-catalog-content\") pod \"redhat-marketplace-rdx2c\" (UID: \"303d970a-e04c-4ba1-a0da-dd21716371e9\") " pod="openshift-marketplace/redhat-marketplace-rdx2c"
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.908605 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/303d970a-e04c-4ba1-a0da-dd21716371e9-utilities\") pod \"redhat-marketplace-rdx2c\" (UID: \"303d970a-e04c-4ba1-a0da-dd21716371e9\") " pod="openshift-marketplace/redhat-marketplace-rdx2c"
Dec 11 21:50:13 crc kubenswrapper[4956]: I1211 21:50:13.908673 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzqw5\" (UniqueName: \"kubernetes.io/projected/303d970a-e04c-4ba1-a0da-dd21716371e9-kube-api-access-qzqw5\") pod \"redhat-marketplace-rdx2c\" (UID: \"303d970a-e04c-4ba1-a0da-dd21716371e9\") " pod="openshift-marketplace/redhat-marketplace-rdx2c"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.009356 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzqw5\" (UniqueName: \"kubernetes.io/projected/303d970a-e04c-4ba1-a0da-dd21716371e9-kube-api-access-qzqw5\") pod \"redhat-marketplace-rdx2c\" (UID: \"303d970a-e04c-4ba1-a0da-dd21716371e9\") " pod="openshift-marketplace/redhat-marketplace-rdx2c"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.009426 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/303d970a-e04c-4ba1-a0da-dd21716371e9-catalog-content\") pod \"redhat-marketplace-rdx2c\" (UID: \"303d970a-e04c-4ba1-a0da-dd21716371e9\") " pod="openshift-marketplace/redhat-marketplace-rdx2c"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.009490 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/303d970a-e04c-4ba1-a0da-dd21716371e9-utilities\") pod \"redhat-marketplace-rdx2c\" (UID: \"303d970a-e04c-4ba1-a0da-dd21716371e9\") " pod="openshift-marketplace/redhat-marketplace-rdx2c"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.009951 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/303d970a-e04c-4ba1-a0da-dd21716371e9-utilities\") pod \"redhat-marketplace-rdx2c\" (UID: \"303d970a-e04c-4ba1-a0da-dd21716371e9\") " pod="openshift-marketplace/redhat-marketplace-rdx2c"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.010824 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/303d970a-e04c-4ba1-a0da-dd21716371e9-catalog-content\") pod \"redhat-marketplace-rdx2c\" (UID: \"303d970a-e04c-4ba1-a0da-dd21716371e9\") " pod="openshift-marketplace/redhat-marketplace-rdx2c"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.043909 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzqw5\" (UniqueName: \"kubernetes.io/projected/303d970a-e04c-4ba1-a0da-dd21716371e9-kube-api-access-qzqw5\") pod \"redhat-marketplace-rdx2c\" (UID: \"303d970a-e04c-4ba1-a0da-dd21716371e9\") " pod="openshift-marketplace/redhat-marketplace-rdx2c"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.055315 4956 generic.go:334] "Generic (PLEG): container finished" podID="de7b66f2-e73f-46ad-80da-a2c834e099c6" containerID="11153b7226851fdee13041aaf93dba4fa43bc0d95de88ad9e0a1b98321cfdae8" exitCode=0
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.056140 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.056786 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" event={"ID":"6bcc8182-6e42-4b00-a247-803f1b9bd1d3","Type":"ContainerStarted","Data":"d75c401adfba87a7daf86cd15a1619be7e4b04807ad00fb6c9a45007c5e179e8"}
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.056813 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" event={"ID":"6bcc8182-6e42-4b00-a247-803f1b9bd1d3","Type":"ContainerStarted","Data":"d2e8266f2d17334e3c52ad78995eb56b318f07dc15403ae94e34ee9bdf6c8ed8"}
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.074504 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkbh9" event={"ID":"de7b66f2-e73f-46ad-80da-a2c834e099c6","Type":"ContainerDied","Data":"11153b7226851fdee13041aaf93dba4fa43bc0d95de88ad9e0a1b98321cfdae8"}
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.074553 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkbh9" event={"ID":"de7b66f2-e73f-46ad-80da-a2c834e099c6","Type":"ContainerStarted","Data":"afb9c1af17d3a04800f1f7073a4f5f032094d475a3d0f50ac9dab1c28179a486"}
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.074574 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.074585 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bn9xr" event={"ID":"aa022a30-6487-45c0-82b7-336a05167918","Type":"ContainerDied","Data":"331c3c10550457239ac2e38348bf93dab6b136c5c37453343687ebc567560af5"}
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.065905 4956 generic.go:334] "Generic (PLEG): container finished" podID="aa022a30-6487-45c0-82b7-336a05167918" containerID="331c3c10550457239ac2e38348bf93dab6b136c5c37453343687ebc567560af5" exitCode=0
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.064020 4956 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.081493 4956 generic.go:334] "Generic (PLEG): container finished" podID="e0e7dba7-84b8-4ec4-9def-5eb44ac9a523" containerID="81b0ae5b94cba7605e6cda759f7e64ff82d64f2b7d337b0e44d09b64237129d7" exitCode=0
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.081829 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hmhrj" event={"ID":"e0e7dba7-84b8-4ec4-9def-5eb44ac9a523","Type":"ContainerDied","Data":"81b0ae5b94cba7605e6cda759f7e64ff82d64f2b7d337b0e44d09b64237129d7"}
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.081867 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hmhrj" event={"ID":"e0e7dba7-84b8-4ec4-9def-5eb44ac9a523","Type":"ContainerStarted","Data":"498377653e053befbfac82ec21a2f61592f13a059bb3110e6fb50d63e0e59c42"}
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.108594 4956 generic.go:334] "Generic (PLEG): container finished" podID="b39bb8fb-c84a-48cc-aa65-b992c06a090b" containerID="cb24e1ea9f15362e076ca5767d1f9ead26156ff22e051077c0197e828a3da69d" exitCode=0
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.108904 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424825-fsrlm" event={"ID":"b39bb8fb-c84a-48cc-aa65-b992c06a090b","Type":"ContainerDied","Data":"cb24e1ea9f15362e076ca5767d1f9ead26156ff22e051077c0197e828a3da69d"}
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.118564 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" podStartSLOduration=87.118541901 podStartE2EDuration="1m27.118541901s" podCreationTimestamp="2025-12-11 21:48:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:14.11245202 +0000 UTC m=+106.556830190" watchObservedRunningTime="2025-12-11 21:50:14.118541901 +0000 UTC m=+106.562920051"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.120128 4956 generic.go:334] "Generic (PLEG): container finished" podID="3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336" containerID="28a2de006491ea97b6fe4e5988d1c16a3ce0116eccc30640e3bdc8631d78154d" exitCode=0
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.120628 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l5286" event={"ID":"3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336","Type":"ContainerDied","Data":"28a2de006491ea97b6fe4e5988d1c16a3ce0116eccc30640e3bdc8631d78154d"}
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.146159 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-jkrgw"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.146198 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-jkrgw"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.148150 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-6rtsw"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.164123 4956 patch_prober.go:28] interesting pod/console-f9d7485db-jkrgw container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.40:8443/health\": dial tcp 10.217.0.40:8443: connect: connection refused" start-of-body=
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.164178 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-jkrgw" podUID="998b11ed-322d-49b3-9a3a-79474037d6ea" containerName="console" probeResult="failure" output="Get \"https://10.217.0.40:8443/health\": dial tcp 10.217.0.40:8443: connect: connection refused"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.260146 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rdx2c"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.362949 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5khrp"]
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.440074 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-46xqx"]
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.441189 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-46xqx"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.443312 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-46xqx"]
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.458635 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.514969 4956 patch_prober.go:28] interesting pod/apiserver-76f77b778f-f4wrs container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Dec 11 21:50:14 crc kubenswrapper[4956]: [+]log ok
Dec 11 21:50:14 crc kubenswrapper[4956]: [+]etcd ok
Dec 11 21:50:14 crc kubenswrapper[4956]: [+]poststarthook/start-apiserver-admission-initializer ok
Dec 11 21:50:14 crc kubenswrapper[4956]: [+]poststarthook/generic-apiserver-start-informers ok
Dec 11 21:50:14 crc kubenswrapper[4956]: [+]poststarthook/max-in-flight-filter ok
Dec 11 21:50:14 crc kubenswrapper[4956]: [+]poststarthook/storage-object-count-tracker-hook ok
Dec 11 21:50:14 crc kubenswrapper[4956]: [+]poststarthook/image.openshift.io-apiserver-caches ok
Dec 11 21:50:14 crc kubenswrapper[4956]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld
Dec 11 21:50:14 crc kubenswrapper[4956]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld
Dec 11 21:50:14 crc kubenswrapper[4956]: [+]poststarthook/project.openshift.io-projectcache ok
Dec 11 21:50:14 crc kubenswrapper[4956]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok
Dec 11 21:50:14 crc kubenswrapper[4956]: [+]poststarthook/openshift.io-startinformers ok
Dec 11 21:50:14 crc kubenswrapper[4956]: [+]poststarthook/openshift.io-restmapperupdater ok
Dec 11 21:50:14 crc kubenswrapper[4956]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Dec 11 21:50:14 crc kubenswrapper[4956]: livez check failed
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.515389 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-f4wrs" podUID="3b1e309e-6542-43b9-95cc-3197be39a203" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.624014 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xptp7\" (UniqueName: \"kubernetes.io/projected/46394f6a-9e6f-49f8-a879-1753789c4ba0-kube-api-access-xptp7\") pod \"redhat-operators-46xqx\" (UID: \"46394f6a-9e6f-49f8-a879-1753789c4ba0\") " pod="openshift-marketplace/redhat-operators-46xqx"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.624139 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46394f6a-9e6f-49f8-a879-1753789c4ba0-utilities\") pod \"redhat-operators-46xqx\" (UID: \"46394f6a-9e6f-49f8-a879-1753789c4ba0\") " pod="openshift-marketplace/redhat-operators-46xqx"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.624193 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46394f6a-9e6f-49f8-a879-1753789c4ba0-catalog-content\") pod \"redhat-operators-46xqx\" (UID: \"46394f6a-9e6f-49f8-a879-1753789c4ba0\") " pod="openshift-marketplace/redhat-operators-46xqx"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.701797 4956 patch_prober.go:28] interesting pod/router-default-5444994796-fbpg7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 11 21:50:14 crc kubenswrapper[4956]: [-]has-synced failed: reason withheld
Dec 11 21:50:14 crc kubenswrapper[4956]: [+]process-running ok
Dec 11 21:50:14 crc kubenswrapper[4956]: healthz check failed
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.701860 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fbpg7" podUID="1f36b124-c397-4935-82b6-191d83292d1b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.725662 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46394f6a-9e6f-49f8-a879-1753789c4ba0-utilities\") pod \"redhat-operators-46xqx\" (UID: \"46394f6a-9e6f-49f8-a879-1753789c4ba0\") " pod="openshift-marketplace/redhat-operators-46xqx"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.725745 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46394f6a-9e6f-49f8-a879-1753789c4ba0-catalog-content\") pod \"redhat-operators-46xqx\" (UID: \"46394f6a-9e6f-49f8-a879-1753789c4ba0\") " pod="openshift-marketplace/redhat-operators-46xqx"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.725823 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xptp7\" (UniqueName: \"kubernetes.io/projected/46394f6a-9e6f-49f8-a879-1753789c4ba0-kube-api-access-xptp7\") pod \"redhat-operators-46xqx\" (UID: \"46394f6a-9e6f-49f8-a879-1753789c4ba0\") " pod="openshift-marketplace/redhat-operators-46xqx"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.726509 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46394f6a-9e6f-49f8-a879-1753789c4ba0-utilities\") pod \"redhat-operators-46xqx\" (UID: \"46394f6a-9e6f-49f8-a879-1753789c4ba0\") " pod="openshift-marketplace/redhat-operators-46xqx"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.726815 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46394f6a-9e6f-49f8-a879-1753789c4ba0-catalog-content\") pod \"redhat-operators-46xqx\" (UID: \"46394f6a-9e6f-49f8-a879-1753789c4ba0\") " pod="openshift-marketplace/redhat-operators-46xqx"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.745516 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rdx2c"]
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.751955 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xptp7\" (UniqueName: \"kubernetes.io/projected/46394f6a-9e6f-49f8-a879-1753789c4ba0-kube-api-access-xptp7\") pod \"redhat-operators-46xqx\" (UID: \"46394f6a-9e6f-49f8-a879-1753789c4ba0\") " pod="openshift-marketplace/redhat-operators-46xqx"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.803449 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-46xqx"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.814076 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tvxwj"]
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.815462 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tvxwj"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.825345 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tvxwj"]
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.929733 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e87ecc79-efd6-4f8c-859b-4c527eaf0225-catalog-content\") pod \"redhat-operators-tvxwj\" (UID: \"e87ecc79-efd6-4f8c-859b-4c527eaf0225\") " pod="openshift-marketplace/redhat-operators-tvxwj"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.930110 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e87ecc79-efd6-4f8c-859b-4c527eaf0225-utilities\") pod \"redhat-operators-tvxwj\" (UID: \"e87ecc79-efd6-4f8c-859b-4c527eaf0225\") " pod="openshift-marketplace/redhat-operators-tvxwj"
Dec 11 21:50:14 crc kubenswrapper[4956]: I1211 21:50:14.930156 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5g7hp\" (UniqueName: \"kubernetes.io/projected/e87ecc79-efd6-4f8c-859b-4c527eaf0225-kube-api-access-5g7hp\") pod \"redhat-operators-tvxwj\" (UID: \"e87ecc79-efd6-4f8c-859b-4c527eaf0225\") " pod="openshift-marketplace/redhat-operators-tvxwj"
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.031629 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e87ecc79-efd6-4f8c-859b-4c527eaf0225-utilities\") pod \"redhat-operators-tvxwj\" (UID: \"e87ecc79-efd6-4f8c-859b-4c527eaf0225\") " pod="openshift-marketplace/redhat-operators-tvxwj"
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.031727 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5g7hp\" (UniqueName: \"kubernetes.io/projected/e87ecc79-efd6-4f8c-859b-4c527eaf0225-kube-api-access-5g7hp\") pod \"redhat-operators-tvxwj\" (UID: \"e87ecc79-efd6-4f8c-859b-4c527eaf0225\") " pod="openshift-marketplace/redhat-operators-tvxwj"
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.031849 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e87ecc79-efd6-4f8c-859b-4c527eaf0225-catalog-content\") pod \"redhat-operators-tvxwj\" (UID: \"e87ecc79-efd6-4f8c-859b-4c527eaf0225\") " pod="openshift-marketplace/redhat-operators-tvxwj"
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.032417 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e87ecc79-efd6-4f8c-859b-4c527eaf0225-catalog-content\") pod \"redhat-operators-tvxwj\" (UID: \"e87ecc79-efd6-4f8c-859b-4c527eaf0225\") " pod="openshift-marketplace/redhat-operators-tvxwj"
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.032690 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e87ecc79-efd6-4f8c-859b-4c527eaf0225-utilities\") pod \"redhat-operators-tvxwj\" (UID: \"e87ecc79-efd6-4f8c-859b-4c527eaf0225\") " pod="openshift-marketplace/redhat-operators-tvxwj"
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.049379 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-46xqx"]
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.054232 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5g7hp\" (UniqueName: \"kubernetes.io/projected/e87ecc79-efd6-4f8c-859b-4c527eaf0225-kube-api-access-5g7hp\") pod \"redhat-operators-tvxwj\" (UID: \"e87ecc79-efd6-4f8c-859b-4c527eaf0225\") " pod="openshift-marketplace/redhat-operators-tvxwj"
Dec 11 21:50:15 crc kubenswrapper[4956]: W1211 21:50:15.055998 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod46394f6a_9e6f_49f8_a879_1753789c4ba0.slice/crio-13f934aa01e4aa609acf0210c84257ec30128b421309fc457f766a5d64382bb0 WatchSource:0}: Error finding container 13f934aa01e4aa609acf0210c84257ec30128b421309fc457f766a5d64382bb0: Status 404 returned error can't find the container with id 13f934aa01e4aa609acf0210c84257ec30128b421309fc457f766a5d64382bb0
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.131618 4956 generic.go:334] "Generic (PLEG): container finished" podID="303d970a-e04c-4ba1-a0da-dd21716371e9" containerID="64e0b1b6435e2c9669c0c7f0791290e825c64fb1c6471df4358278e38232a41c" exitCode=0
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.131723 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rdx2c" event={"ID":"303d970a-e04c-4ba1-a0da-dd21716371e9","Type":"ContainerDied","Data":"64e0b1b6435e2c9669c0c7f0791290e825c64fb1c6471df4358278e38232a41c"}
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.131753 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rdx2c" event={"ID":"303d970a-e04c-4ba1-a0da-dd21716371e9","Type":"ContainerStarted","Data":"1b93cf5e5505f08dd4010de4d246dd27e808035f55ef37226c5896dcc4bae1a6"}
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.134787 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-46xqx" event={"ID":"46394f6a-9e6f-49f8-a879-1753789c4ba0","Type":"ContainerStarted","Data":"13f934aa01e4aa609acf0210c84257ec30128b421309fc457f766a5d64382bb0"}
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.140935 4956 generic.go:334] "Generic (PLEG): container finished" podID="48520909-a6cd-4ec4-a6db-35a778505823" containerID="117ae9c7ee44b3d6411f31f57c8305711d7cc4cc394892ee174307b7c5bde141" exitCode=0
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.141186 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5khrp" event={"ID":"48520909-a6cd-4ec4-a6db-35a778505823","Type":"ContainerDied","Data":"117ae9c7ee44b3d6411f31f57c8305711d7cc4cc394892ee174307b7c5bde141"}
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.141394 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5khrp" event={"ID":"48520909-a6cd-4ec4-a6db-35a778505823","Type":"ContainerStarted","Data":"0c6a88443074267a986f576175d375f029859b56609782ea89d7a77b08fcc47e"}
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.179430 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tvxwj"
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.313741 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.314453 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.320017 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.320191 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.323587 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.474902 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0a33b08d-18bc-4f33-a10e-237391ee663e-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"0a33b08d-18bc-4f33-a10e-237391ee663e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.474983 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0a33b08d-18bc-4f33-a10e-237391ee663e-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"0a33b08d-18bc-4f33-a10e-237391ee663e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.555882 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424825-fsrlm"
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.579532 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qt4wm\" (UniqueName: \"kubernetes.io/projected/b39bb8fb-c84a-48cc-aa65-b992c06a090b-kube-api-access-qt4wm\") pod \"b39bb8fb-c84a-48cc-aa65-b992c06a090b\" (UID: \"b39bb8fb-c84a-48cc-aa65-b992c06a090b\") "
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.579600 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b39bb8fb-c84a-48cc-aa65-b992c06a090b-secret-volume\") pod \"b39bb8fb-c84a-48cc-aa65-b992c06a090b\" (UID: \"b39bb8fb-c84a-48cc-aa65-b992c06a090b\") "
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.579667 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b39bb8fb-c84a-48cc-aa65-b992c06a090b-config-volume\") pod \"b39bb8fb-c84a-48cc-aa65-b992c06a090b\" (UID: \"b39bb8fb-c84a-48cc-aa65-b992c06a090b\") "
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.579832 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0a33b08d-18bc-4f33-a10e-237391ee663e-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"0a33b08d-18bc-4f33-a10e-237391ee663e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.579939 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0a33b08d-18bc-4f33-a10e-237391ee663e-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"0a33b08d-18bc-4f33-a10e-237391ee663e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.581086 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b39bb8fb-c84a-48cc-aa65-b992c06a090b-config-volume" (OuterVolumeSpecName: "config-volume") pod "b39bb8fb-c84a-48cc-aa65-b992c06a090b" (UID: "b39bb8fb-c84a-48cc-aa65-b992c06a090b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.581089 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0a33b08d-18bc-4f33-a10e-237391ee663e-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"0a33b08d-18bc-4f33-a10e-237391ee663e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.595424 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b39bb8fb-c84a-48cc-aa65-b992c06a090b-kube-api-access-qt4wm" (OuterVolumeSpecName: "kube-api-access-qt4wm") pod "b39bb8fb-c84a-48cc-aa65-b992c06a090b" (UID: "b39bb8fb-c84a-48cc-aa65-b992c06a090b"). InnerVolumeSpecName "kube-api-access-qt4wm". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.596008 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b39bb8fb-c84a-48cc-aa65-b992c06a090b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b39bb8fb-c84a-48cc-aa65-b992c06a090b" (UID: "b39bb8fb-c84a-48cc-aa65-b992c06a090b"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.597838 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0a33b08d-18bc-4f33-a10e-237391ee663e-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"0a33b08d-18bc-4f33-a10e-237391ee663e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.625396 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 11 21:50:15 crc kubenswrapper[4956]: E1211 21:50:15.625622 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b39bb8fb-c84a-48cc-aa65-b992c06a090b" containerName="collect-profiles" Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.625634 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="b39bb8fb-c84a-48cc-aa65-b992c06a090b" containerName="collect-profiles" Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.626311 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="b39bb8fb-c84a-48cc-aa65-b992c06a090b" containerName="collect-profiles" Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.626712 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.631166 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.631424 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.638071 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.638133 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tvxwj"] Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.682055 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/af018293-62cb-47a8-8fb7-751ef3d1560f-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"af018293-62cb-47a8-8fb7-751ef3d1560f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.682100 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/af018293-62cb-47a8-8fb7-751ef3d1560f-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"af018293-62cb-47a8-8fb7-751ef3d1560f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.682301 4956 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-qt4wm\" (UniqueName: \"kubernetes.io/projected/b39bb8fb-c84a-48cc-aa65-b992c06a090b-kube-api-access-qt4wm\") on node \"crc\" DevicePath \"\"" Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.682332 4956 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b39bb8fb-c84a-48cc-aa65-b992c06a090b-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.682345 4956 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b39bb8fb-c84a-48cc-aa65-b992c06a090b-config-volume\") on node \"crc\" DevicePath \"\"" Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.700387 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.703675 4956 patch_prober.go:28] interesting pod/router-default-5444994796-fbpg7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 21:50:15 crc kubenswrapper[4956]: [-]has-synced failed: reason withheld Dec 11 21:50:15 crc kubenswrapper[4956]: [+]process-running ok Dec 11 21:50:15 crc kubenswrapper[4956]: healthz check failed Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.703718 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fbpg7" podUID="1f36b124-c397-4935-82b6-191d83292d1b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.783336 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/af018293-62cb-47a8-8fb7-751ef3d1560f-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"af018293-62cb-47a8-8fb7-751ef3d1560f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.783381 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/af018293-62cb-47a8-8fb7-751ef3d1560f-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"af018293-62cb-47a8-8fb7-751ef3d1560f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.783514 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/af018293-62cb-47a8-8fb7-751ef3d1560f-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"af018293-62cb-47a8-8fb7-751ef3d1560f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.803425 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/af018293-62cb-47a8-8fb7-751ef3d1560f-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"af018293-62cb-47a8-8fb7-751ef3d1560f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.921048 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vzvmj" Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.946614 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 11 21:50:15 crc kubenswrapper[4956]: I1211 21:50:15.979244 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 11 21:50:16 crc kubenswrapper[4956]: I1211 21:50:16.152879 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424825-fsrlm" event={"ID":"b39bb8fb-c84a-48cc-aa65-b992c06a090b","Type":"ContainerDied","Data":"8acea8f51dbfa931156b719a185bdd338f0fef629f00e121efc601b0046318b3"}
Dec 11 21:50:16 crc kubenswrapper[4956]: I1211 21:50:16.152920 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8acea8f51dbfa931156b719a185bdd338f0fef629f00e121efc601b0046318b3"
Dec 11 21:50:16 crc kubenswrapper[4956]: I1211 21:50:16.153021 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424825-fsrlm"
Dec 11 21:50:16 crc kubenswrapper[4956]: I1211 21:50:16.167242 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"0a33b08d-18bc-4f33-a10e-237391ee663e","Type":"ContainerStarted","Data":"808c52f3dd6dd4ff6a859d578e24407c5f0ba5f3c6d2e7d0ac16b9589261617b"}
Dec 11 21:50:16 crc kubenswrapper[4956]: I1211 21:50:16.168503 4956 generic.go:334] "Generic (PLEG): container finished" podID="46394f6a-9e6f-49f8-a879-1753789c4ba0" containerID="2e3efed30cd9ddd2f1bbafecf0c524a1268e95089b9b3a7eef27c65de5d6024b" exitCode=0
Dec 11 21:50:16 crc kubenswrapper[4956]: I1211 21:50:16.168563 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-46xqx" event={"ID":"46394f6a-9e6f-49f8-a879-1753789c4ba0","Type":"ContainerDied","Data":"2e3efed30cd9ddd2f1bbafecf0c524a1268e95089b9b3a7eef27c65de5d6024b"}
Dec 11 21:50:16 crc kubenswrapper[4956]: I1211 21:50:16.184123 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tvxwj" event={"ID":"e87ecc79-efd6-4f8c-859b-4c527eaf0225","Type":"ContainerStarted","Data":"35cdbe5f4762548d1b2e05b47565ec6a520d078d9f107245e29291c3bb11eb01"}
Dec 11 21:50:16 crc kubenswrapper[4956]: I1211 21:50:16.184180 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tvxwj" event={"ID":"e87ecc79-efd6-4f8c-859b-4c527eaf0225","Type":"ContainerStarted","Data":"502c0e335943cddd990ced13d0fd1fcf2560e5b770ac06bdf70e73f8ce3278df"}
Dec 11 21:50:16 crc kubenswrapper[4956]: I1211 21:50:16.275662 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 11 21:50:16 crc kubenswrapper[4956]: W1211 21:50:16.290262 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podaf018293_62cb_47a8_8fb7_751ef3d1560f.slice/crio-a5411f62433e31e7d842459b25486b541f643df809d2443907b71afc776640bd WatchSource:0}: Error finding container a5411f62433e31e7d842459b25486b541f643df809d2443907b71afc776640bd: Status 404 returned error can't find the container with id a5411f62433e31e7d842459b25486b541f643df809d2443907b71afc776640bd
Dec 11 21:50:16 crc kubenswrapper[4956]: I1211 21:50:16.696347 4956 patch_prober.go:28] interesting pod/router-default-5444994796-fbpg7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 11 21:50:16 crc kubenswrapper[4956]: [-]has-synced failed: reason withheld
Dec 11 21:50:16 crc kubenswrapper[4956]: [+]process-running ok
Dec 11 21:50:16 crc kubenswrapper[4956]: healthz check failed
Dec 11 21:50:16 crc kubenswrapper[4956]: I1211 21:50:16.696419 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fbpg7" podUID="1f36b124-c397-4935-82b6-191d83292d1b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 11 21:50:17 crc kubenswrapper[4956]: I1211 21:50:17.231897 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"af018293-62cb-47a8-8fb7-751ef3d1560f","Type":"ContainerStarted","Data":"b00910cb2141c2cc983940b15173dc47944eb69c8eda943826e1dbd67487a20a"}
Dec 11 21:50:17 crc kubenswrapper[4956]: I1211 21:50:17.232323 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"af018293-62cb-47a8-8fb7-751ef3d1560f","Type":"ContainerStarted","Data":"a5411f62433e31e7d842459b25486b541f643df809d2443907b71afc776640bd"}
Dec 11 21:50:17 crc kubenswrapper[4956]: I1211 21:50:17.238841 4956 generic.go:334] "Generic (PLEG): container finished" podID="e87ecc79-efd6-4f8c-859b-4c527eaf0225" containerID="35cdbe5f4762548d1b2e05b47565ec6a520d078d9f107245e29291c3bb11eb01" exitCode=0
Dec 11 21:50:17 crc kubenswrapper[4956]: I1211 21:50:17.238928 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tvxwj" event={"ID":"e87ecc79-efd6-4f8c-859b-4c527eaf0225","Type":"ContainerDied","Data":"35cdbe5f4762548d1b2e05b47565ec6a520d078d9f107245e29291c3bb11eb01"}
Dec 11 21:50:17 crc kubenswrapper[4956]: I1211 21:50:17.242306 4956 generic.go:334] "Generic (PLEG): container finished" podID="0a33b08d-18bc-4f33-a10e-237391ee663e" containerID="76e545b6b8a409586872dc44b9c83bae84bea81463c2d0b1d6e3fbfb4c19aece" exitCode=0
Dec 11 21:50:17 crc kubenswrapper[4956]: I1211 21:50:17.242617 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"0a33b08d-18bc-4f33-a10e-237391ee663e","Type":"ContainerDied","Data":"76e545b6b8a409586872dc44b9c83bae84bea81463c2d0b1d6e3fbfb4c19aece"}
Dec 11 21:50:17 crc kubenswrapper[4956]: I1211 21:50:17.245215 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.245193227 podStartE2EDuration="2.245193227s" podCreationTimestamp="2025-12-11 21:50:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:50:17.245167356 +0000 UTC m=+109.689545516" watchObservedRunningTime="2025-12-11 21:50:17.245193227 +0000 UTC m=+109.689571377"
Dec 11 21:50:17 crc kubenswrapper[4956]: I1211 21:50:17.695270 4956 patch_prober.go:28] interesting pod/router-default-5444994796-fbpg7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 11 21:50:17 crc kubenswrapper[4956]: [-]has-synced failed: reason withheld
Dec 11 21:50:17 crc kubenswrapper[4956]: [+]process-running ok
Dec 11 21:50:17 crc kubenswrapper[4956]: healthz check failed
Dec 11 21:50:17 crc kubenswrapper[4956]: I1211 21:50:17.695350 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fbpg7" podUID="1f36b124-c397-4935-82b6-191d83292d1b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 11 21:50:18 crc kubenswrapper[4956]: I1211 21:50:18.252013 4956 generic.go:334] "Generic (PLEG): container finished" podID="af018293-62cb-47a8-8fb7-751ef3d1560f" containerID="b00910cb2141c2cc983940b15173dc47944eb69c8eda943826e1dbd67487a20a" exitCode=0
Dec 11 21:50:18 crc kubenswrapper[4956]: I1211 21:50:18.252432 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"af018293-62cb-47a8-8fb7-751ef3d1560f","Type":"ContainerDied","Data":"b00910cb2141c2cc983940b15173dc47944eb69c8eda943826e1dbd67487a20a"}
Dec 11 21:50:18 crc kubenswrapper[4956]: I1211 21:50:18.666864 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-f4wrs"
Dec 11 21:50:18 crc kubenswrapper[4956]: I1211 21:50:18.672282 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-f4wrs"
Dec 11 21:50:18 crc kubenswrapper[4956]: I1211 21:50:18.724985 4956 patch_prober.go:28] interesting pod/router-default-5444994796-fbpg7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 11 21:50:18 crc kubenswrapper[4956]: [-]has-synced failed: reason withheld
Dec 11 21:50:18 crc kubenswrapper[4956]: [+]process-running ok
Dec 11 21:50:18 crc kubenswrapper[4956]: healthz check failed
Dec 11 21:50:18 crc kubenswrapper[4956]: I1211 21:50:18.725031 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fbpg7" podUID="1f36b124-c397-4935-82b6-191d83292d1b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 11 21:50:18 crc kubenswrapper[4956]: I1211 21:50:18.865264 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-6nc5z"
Dec 11 21:50:19 crc kubenswrapper[4956]: I1211 21:50:19.083151 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 11 21:50:19 crc kubenswrapper[4956]: I1211 21:50:19.275382 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0a33b08d-18bc-4f33-a10e-237391ee663e-kube-api-access\") pod \"0a33b08d-18bc-4f33-a10e-237391ee663e\" (UID: \"0a33b08d-18bc-4f33-a10e-237391ee663e\") "
Dec 11 21:50:19 crc kubenswrapper[4956]: I1211 21:50:19.275473 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0a33b08d-18bc-4f33-a10e-237391ee663e-kubelet-dir\") pod \"0a33b08d-18bc-4f33-a10e-237391ee663e\" (UID: \"0a33b08d-18bc-4f33-a10e-237391ee663e\") "
Dec 11 21:50:19 crc kubenswrapper[4956]: I1211 21:50:19.275716 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0a33b08d-18bc-4f33-a10e-237391ee663e-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "0a33b08d-18bc-4f33-a10e-237391ee663e" (UID: "0a33b08d-18bc-4f33-a10e-237391ee663e"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 11 21:50:19 crc kubenswrapper[4956]: I1211 21:50:19.277252 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 11 21:50:19 crc kubenswrapper[4956]: I1211 21:50:19.277350 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"0a33b08d-18bc-4f33-a10e-237391ee663e","Type":"ContainerDied","Data":"808c52f3dd6dd4ff6a859d578e24407c5f0ba5f3c6d2e7d0ac16b9589261617b"}
Dec 11 21:50:19 crc kubenswrapper[4956]: I1211 21:50:19.277377 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="808c52f3dd6dd4ff6a859d578e24407c5f0ba5f3c6d2e7d0ac16b9589261617b"
Dec 11 21:50:19 crc kubenswrapper[4956]: I1211 21:50:19.306521 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a33b08d-18bc-4f33-a10e-237391ee663e-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0a33b08d-18bc-4f33-a10e-237391ee663e" (UID: "0a33b08d-18bc-4f33-a10e-237391ee663e"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 11 21:50:19 crc kubenswrapper[4956]: I1211 21:50:19.387579 4956 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0a33b08d-18bc-4f33-a10e-237391ee663e-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 11 21:50:19 crc kubenswrapper[4956]: I1211 21:50:19.387639 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0a33b08d-18bc-4f33-a10e-237391ee663e-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 11 21:50:19 crc kubenswrapper[4956]: I1211 21:50:19.694427 4956 patch_prober.go:28] interesting pod/router-default-5444994796-fbpg7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 11 21:50:19 crc kubenswrapper[4956]: [-]has-synced failed: reason withheld
Dec 11 21:50:19 crc kubenswrapper[4956]: [+]process-running ok
Dec 11 21:50:19 crc kubenswrapper[4956]: healthz check failed
Dec 11 21:50:19 crc kubenswrapper[4956]: I1211 21:50:19.694473 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fbpg7" podUID="1f36b124-c397-4935-82b6-191d83292d1b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 21:50:19 crc kubenswrapper[4956]: I1211 21:50:19.895194 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/af018293-62cb-47a8-8fb7-751ef3d1560f-kube-api-access\") pod \"af018293-62cb-47a8-8fb7-751ef3d1560f\" (UID: \"af018293-62cb-47a8-8fb7-751ef3d1560f\") " Dec 11 21:50:19 crc kubenswrapper[4956]: I1211 21:50:19.895353 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/af018293-62cb-47a8-8fb7-751ef3d1560f-kubelet-dir\") pod \"af018293-62cb-47a8-8fb7-751ef3d1560f\" (UID: \"af018293-62cb-47a8-8fb7-751ef3d1560f\") " Dec 11 21:50:19 crc kubenswrapper[4956]: I1211 21:50:19.895702 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af018293-62cb-47a8-8fb7-751ef3d1560f-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "af018293-62cb-47a8-8fb7-751ef3d1560f" (UID: "af018293-62cb-47a8-8fb7-751ef3d1560f"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 21:50:19 crc kubenswrapper[4956]: I1211 21:50:19.904016 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af018293-62cb-47a8-8fb7-751ef3d1560f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "af018293-62cb-47a8-8fb7-751ef3d1560f" (UID: "af018293-62cb-47a8-8fb7-751ef3d1560f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:50:19 crc kubenswrapper[4956]: I1211 21:50:19.999648 4956 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/af018293-62cb-47a8-8fb7-751ef3d1560f-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 11 21:50:19 crc kubenswrapper[4956]: I1211 21:50:19.999695 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/af018293-62cb-47a8-8fb7-751ef3d1560f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 21:50:20 crc kubenswrapper[4956]: I1211 21:50:20.328180 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"af018293-62cb-47a8-8fb7-751ef3d1560f","Type":"ContainerDied","Data":"a5411f62433e31e7d842459b25486b541f643df809d2443907b71afc776640bd"} Dec 11 21:50:20 crc kubenswrapper[4956]: I1211 21:50:20.328220 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a5411f62433e31e7d842459b25486b541f643df809d2443907b71afc776640bd" Dec 11 21:50:20 crc kubenswrapper[4956]: I1211 21:50:20.328319 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 21:50:20 crc kubenswrapper[4956]: I1211 21:50:20.693844 4956 patch_prober.go:28] interesting pod/router-default-5444994796-fbpg7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 21:50:20 crc kubenswrapper[4956]: [-]has-synced failed: reason withheld Dec 11 21:50:20 crc kubenswrapper[4956]: [+]process-running ok Dec 11 21:50:20 crc kubenswrapper[4956]: healthz check failed Dec 11 21:50:20 crc kubenswrapper[4956]: I1211 21:50:20.693905 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fbpg7" podUID="1f36b124-c397-4935-82b6-191d83292d1b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 21:50:21 crc kubenswrapper[4956]: I1211 21:50:21.694620 4956 patch_prober.go:28] interesting pod/router-default-5444994796-fbpg7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 21:50:21 crc kubenswrapper[4956]: [-]has-synced failed: reason withheld Dec 11 21:50:21 crc kubenswrapper[4956]: [+]process-running ok Dec 11 21:50:21 crc kubenswrapper[4956]: healthz check failed Dec 11 21:50:21 crc kubenswrapper[4956]: I1211 21:50:21.694670 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fbpg7" podUID="1f36b124-c397-4935-82b6-191d83292d1b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 21:50:22 crc kubenswrapper[4956]: I1211 21:50:22.693290 4956 patch_prober.go:28] interesting pod/router-default-5444994796-fbpg7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 21:50:22 crc kubenswrapper[4956]: [-]has-synced failed: reason withheld Dec 11 21:50:22 crc kubenswrapper[4956]: [+]process-running ok Dec 11 21:50:22 crc kubenswrapper[4956]: healthz check failed Dec 11 21:50:22 crc kubenswrapper[4956]: I1211 21:50:22.693384 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fbpg7" podUID="1f36b124-c397-4935-82b6-191d83292d1b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 21:50:23 crc kubenswrapper[4956]: I1211 21:50:23.696356 4956 patch_prober.go:28] interesting pod/router-default-5444994796-fbpg7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 21:50:23 crc kubenswrapper[4956]: [-]has-synced failed: reason withheld Dec 11 21:50:23 crc kubenswrapper[4956]: [+]process-running ok Dec 11 21:50:23 crc kubenswrapper[4956]: healthz check failed Dec 11 21:50:23 crc kubenswrapper[4956]: I1211 21:50:23.696407 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fbpg7" podUID="1f36b124-c397-4935-82b6-191d83292d1b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 21:50:23 crc kubenswrapper[4956]: I1211 21:50:23.891286 4956 patch_prober.go:28] interesting pod/downloads-7954f5f757-ztk92 
container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body= Dec 11 21:50:23 crc kubenswrapper[4956]: I1211 21:50:23.891377 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-ztk92" podUID="3d0fc4e8-1c1f-4f4a-8dd2-eaf0c3f7b294" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" Dec 11 21:50:23 crc kubenswrapper[4956]: I1211 21:50:23.891923 4956 patch_prober.go:28] interesting pod/downloads-7954f5f757-ztk92 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body= Dec 11 21:50:23 crc kubenswrapper[4956]: I1211 21:50:23.891985 4956 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-ztk92" podUID="3d0fc4e8-1c1f-4f4a-8dd2-eaf0c3f7b294" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" Dec 11 21:50:24 crc kubenswrapper[4956]: I1211 21:50:24.140863 4956 patch_prober.go:28] interesting pod/console-f9d7485db-jkrgw container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.40:8443/health\": dial tcp 10.217.0.40:8443: connect: connection refused" start-of-body= Dec 11 21:50:24 crc kubenswrapper[4956]: I1211 21:50:24.140921 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-jkrgw" podUID="998b11ed-322d-49b3-9a3a-79474037d6ea" containerName="console" probeResult="failure" output="Get \"https://10.217.0.40:8443/health\": dial tcp 10.217.0.40:8443: connect: connection refused" Dec 11 21:50:24 crc kubenswrapper[4956]: I1211 21:50:24.692611 4956 patch_prober.go:28] interesting pod/router-default-5444994796-fbpg7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 21:50:24 crc kubenswrapper[4956]: [-]has-synced failed: reason withheld Dec 11 21:50:24 crc kubenswrapper[4956]: [+]process-running ok Dec 11 21:50:24 crc kubenswrapper[4956]: healthz check failed Dec 11 21:50:24 crc kubenswrapper[4956]: I1211 21:50:24.692668 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fbpg7" podUID="1f36b124-c397-4935-82b6-191d83292d1b" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 21:50:25 crc kubenswrapper[4956]: I1211 21:50:25.695403 4956 patch_prober.go:28] interesting pod/router-default-5444994796-fbpg7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 21:50:25 crc kubenswrapper[4956]: [-]has-synced failed: reason withheld Dec 11 21:50:25 crc kubenswrapper[4956]: [+]process-running ok Dec 11 21:50:25 crc kubenswrapper[4956]: healthz check failed Dec 11 21:50:25 crc kubenswrapper[4956]: I1211 21:50:25.695456 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fbpg7" podUID="1f36b124-c397-4935-82b6-191d83292d1b" containerName="router" 
probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 21:50:26 crc kubenswrapper[4956]: I1211 21:50:26.695372 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-fbpg7" Dec 11 21:50:26 crc kubenswrapper[4956]: I1211 21:50:26.697688 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-fbpg7" Dec 11 21:50:32 crc kubenswrapper[4956]: I1211 21:50:32.676661 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:50:33 crc kubenswrapper[4956]: I1211 21:50:33.896823 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-ztk92" Dec 11 21:50:34 crc kubenswrapper[4956]: I1211 21:50:34.161648 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-jkrgw" Dec 11 21:50:34 crc kubenswrapper[4956]: I1211 21:50:34.166191 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-jkrgw" Dec 11 21:50:44 crc kubenswrapper[4956]: I1211 21:50:44.066817 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8r689" Dec 11 21:50:51 crc kubenswrapper[4956]: I1211 21:50:51.105373 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 11 21:50:51 crc kubenswrapper[4956]: E1211 21:50:51.105846 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af018293-62cb-47a8-8fb7-751ef3d1560f" containerName="pruner" Dec 11 21:50:51 crc kubenswrapper[4956]: I1211 21:50:51.105884 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="af018293-62cb-47a8-8fb7-751ef3d1560f" containerName="pruner" Dec 11 21:50:51 crc kubenswrapper[4956]: E1211 21:50:51.105895 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a33b08d-18bc-4f33-a10e-237391ee663e" containerName="pruner" Dec 11 21:50:51 crc kubenswrapper[4956]: I1211 21:50:51.105901 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a33b08d-18bc-4f33-a10e-237391ee663e" containerName="pruner" Dec 11 21:50:51 crc kubenswrapper[4956]: I1211 21:50:51.106002 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="af018293-62cb-47a8-8fb7-751ef3d1560f" containerName="pruner" Dec 11 21:50:51 crc kubenswrapper[4956]: I1211 21:50:51.106019 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a33b08d-18bc-4f33-a10e-237391ee663e" containerName="pruner" Dec 11 21:50:51 crc kubenswrapper[4956]: I1211 21:50:51.106363 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 21:50:51 crc kubenswrapper[4956]: I1211 21:50:51.109382 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 11 21:50:51 crc kubenswrapper[4956]: I1211 21:50:51.109448 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 11 21:50:51 crc kubenswrapper[4956]: I1211 21:50:51.130351 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 11 21:50:51 crc kubenswrapper[4956]: I1211 21:50:51.217529 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f1ea9764-6b62-49b0-950e-d7bfde1fb6c7-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"f1ea9764-6b62-49b0-950e-d7bfde1fb6c7\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 21:50:51 crc kubenswrapper[4956]: I1211 21:50:51.217574 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f1ea9764-6b62-49b0-950e-d7bfde1fb6c7-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"f1ea9764-6b62-49b0-950e-d7bfde1fb6c7\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 21:50:51 crc kubenswrapper[4956]: I1211 21:50:51.318442 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f1ea9764-6b62-49b0-950e-d7bfde1fb6c7-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"f1ea9764-6b62-49b0-950e-d7bfde1fb6c7\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 21:50:51 crc kubenswrapper[4956]: I1211 21:50:51.318497 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f1ea9764-6b62-49b0-950e-d7bfde1fb6c7-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"f1ea9764-6b62-49b0-950e-d7bfde1fb6c7\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 21:50:51 crc kubenswrapper[4956]: I1211 21:50:51.318587 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f1ea9764-6b62-49b0-950e-d7bfde1fb6c7-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"f1ea9764-6b62-49b0-950e-d7bfde1fb6c7\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 21:50:51 crc kubenswrapper[4956]: I1211 21:50:51.338338 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f1ea9764-6b62-49b0-950e-d7bfde1fb6c7-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"f1ea9764-6b62-49b0-950e-d7bfde1fb6c7\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 21:50:51 crc kubenswrapper[4956]: I1211 21:50:51.438272 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 21:50:55 crc kubenswrapper[4956]: I1211 21:50:55.174103 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:50:55 crc kubenswrapper[4956]: I1211 21:50:55.176050 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:50:55 crc kubenswrapper[4956]: I1211 21:50:55.176240 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:50:55 crc kubenswrapper[4956]: I1211 21:50:55.176347 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:50:55 crc kubenswrapper[4956]: I1211 21:50:55.176519 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 11 21:50:55 crc kubenswrapper[4956]: I1211 21:50:55.179729 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 11 21:50:55 crc kubenswrapper[4956]: I1211 21:50:55.180866 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 11 21:50:55 crc kubenswrapper[4956]: I1211 21:50:55.189040 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 11 21:50:55 crc kubenswrapper[4956]: I1211 21:50:55.196105 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:50:55 crc kubenswrapper[4956]: I1211 21:50:55.202683 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:50:55 crc kubenswrapper[4956]: I1211 21:50:55.206403 4956 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:50:55 crc kubenswrapper[4956]: I1211 21:50:55.472199 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 21:50:55 crc kubenswrapper[4956]: I1211 21:50:55.481761 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:50:55 crc kubenswrapper[4956]: I1211 21:50:55.853575 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:50:55 crc kubenswrapper[4956]: I1211 21:50:55.896907 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 11 21:50:55 crc kubenswrapper[4956]: I1211 21:50:55.897784 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 11 21:50:55 crc kubenswrapper[4956]: I1211 21:50:55.906591 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 11 21:50:55 crc kubenswrapper[4956]: I1211 21:50:55.987726 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f688b90e-6a1e-4fa8-9e27-74376499eb6b-kube-api-access\") pod \"installer-9-crc\" (UID: \"f688b90e-6a1e-4fa8-9e27-74376499eb6b\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 21:50:55 crc kubenswrapper[4956]: I1211 21:50:55.987804 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f688b90e-6a1e-4fa8-9e27-74376499eb6b-kubelet-dir\") pod \"installer-9-crc\" (UID: \"f688b90e-6a1e-4fa8-9e27-74376499eb6b\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 21:50:55 crc kubenswrapper[4956]: I1211 21:50:55.987842 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f688b90e-6a1e-4fa8-9e27-74376499eb6b-var-lock\") pod \"installer-9-crc\" (UID: \"f688b90e-6a1e-4fa8-9e27-74376499eb6b\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 21:50:56 crc kubenswrapper[4956]: I1211 21:50:56.060406 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 21:50:56 crc kubenswrapper[4956]: I1211 21:50:56.088937 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f688b90e-6a1e-4fa8-9e27-74376499eb6b-kube-api-access\") pod \"installer-9-crc\" (UID: \"f688b90e-6a1e-4fa8-9e27-74376499eb6b\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 21:50:56 crc kubenswrapper[4956]: I1211 21:50:56.089031 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f688b90e-6a1e-4fa8-9e27-74376499eb6b-kubelet-dir\") pod \"installer-9-crc\" (UID: \"f688b90e-6a1e-4fa8-9e27-74376499eb6b\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 21:50:56 crc kubenswrapper[4956]: I1211 21:50:56.089089 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f688b90e-6a1e-4fa8-9e27-74376499eb6b-var-lock\") pod \"installer-9-crc\" (UID: \"f688b90e-6a1e-4fa8-9e27-74376499eb6b\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 21:50:56 crc kubenswrapper[4956]: I1211 21:50:56.089289 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f688b90e-6a1e-4fa8-9e27-74376499eb6b-var-lock\") pod \"installer-9-crc\" (UID: \"f688b90e-6a1e-4fa8-9e27-74376499eb6b\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 21:50:56 crc kubenswrapper[4956]: I1211 21:50:56.089362 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f688b90e-6a1e-4fa8-9e27-74376499eb6b-kubelet-dir\") pod \"installer-9-crc\" (UID: \"f688b90e-6a1e-4fa8-9e27-74376499eb6b\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 21:50:56 crc kubenswrapper[4956]: I1211 21:50:56.114960 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f688b90e-6a1e-4fa8-9e27-74376499eb6b-kube-api-access\") pod \"installer-9-crc\" (UID: \"f688b90e-6a1e-4fa8-9e27-74376499eb6b\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 21:50:56 crc kubenswrapper[4956]: I1211 21:50:56.232796 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 11 21:51:04 crc kubenswrapper[4956]: E1211 21:51:04.297558 4956 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 11 21:51:04 crc kubenswrapper[4956]: E1211 21:51:04.298372 4956 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vmp9j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-5khrp_openshift-marketplace(48520909-a6cd-4ec4-a6db-35a778505823): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 21:51:04 crc kubenswrapper[4956]: E1211 21:51:04.299906 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-5khrp" podUID="48520909-a6cd-4ec4-a6db-35a778505823" Dec 11 21:51:04 crc kubenswrapper[4956]: E1211 21:51:04.473744 4956 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 11 21:51:04 crc kubenswrapper[4956]: E1211 21:51:04.473928 4956 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qzqw5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-rdx2c_openshift-marketplace(303d970a-e04c-4ba1-a0da-dd21716371e9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 21:51:04 crc kubenswrapper[4956]: E1211 21:51:04.475271 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-rdx2c" podUID="303d970a-e04c-4ba1-a0da-dd21716371e9" Dec 11 21:51:05 crc kubenswrapper[4956]: E1211 21:51:05.609919 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-5khrp" podUID="48520909-a6cd-4ec4-a6db-35a778505823" Dec 11 21:51:05 crc kubenswrapper[4956]: E1211 21:51:05.610025 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-rdx2c" podUID="303d970a-e04c-4ba1-a0da-dd21716371e9" Dec 11 21:51:05 crc kubenswrapper[4956]: E1211 21:51:05.686710 4956 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 11 21:51:05 crc kubenswrapper[4956]: E1211 21:51:05.686936 4956 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vrjcl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-jkbh9_openshift-marketplace(de7b66f2-e73f-46ad-80da-a2c834e099c6): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 21:51:05 crc kubenswrapper[4956]: E1211 21:51:05.688243 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-jkbh9" podUID="de7b66f2-e73f-46ad-80da-a2c834e099c6" Dec 11 21:51:08 crc kubenswrapper[4956]: E1211 21:51:08.481258 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-jkbh9" podUID="de7b66f2-e73f-46ad-80da-a2c834e099c6" Dec 11 21:51:08 crc kubenswrapper[4956]: E1211 21:51:08.558470 4956 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 11 21:51:08 crc kubenswrapper[4956]: E1211 21:51:08.558622 4956 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5g7hp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-tvxwj_openshift-marketplace(e87ecc79-efd6-4f8c-859b-4c527eaf0225): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 21:51:08 crc kubenswrapper[4956]: E1211 21:51:08.560138 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-tvxwj" podUID="e87ecc79-efd6-4f8c-859b-4c527eaf0225" Dec 11 21:51:08 crc kubenswrapper[4956]: E1211 21:51:08.608054 4956 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 11 21:51:08 crc kubenswrapper[4956]: E1211 21:51:08.608226 4956 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jvrgh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-bn9xr_openshift-marketplace(aa022a30-6487-45c0-82b7-336a05167918): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 21:51:08 crc kubenswrapper[4956]: E1211 21:51:08.609399 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-bn9xr" podUID="aa022a30-6487-45c0-82b7-336a05167918" Dec 11 21:51:08 crc kubenswrapper[4956]: E1211 21:51:08.631127 4956 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 11 21:51:08 crc kubenswrapper[4956]: E1211 21:51:08.631277 4956 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xptp7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-46xqx_openshift-marketplace(46394f6a-9e6f-49f8-a879-1753789c4ba0): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 21:51:08 crc kubenswrapper[4956]: E1211 21:51:08.632434 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-46xqx" podUID="46394f6a-9e6f-49f8-a879-1753789c4ba0" Dec 11 21:51:09 crc kubenswrapper[4956]: E1211 21:51:09.985267 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-bn9xr" podUID="aa022a30-6487-45c0-82b7-336a05167918" Dec 11 21:51:09 crc kubenswrapper[4956]: E1211 21:51:09.985555 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-tvxwj" podUID="e87ecc79-efd6-4f8c-859b-4c527eaf0225" Dec 11 21:51:09 crc kubenswrapper[4956]: E1211 21:51:09.985585 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-46xqx" podUID="46394f6a-9e6f-49f8-a879-1753789c4ba0" Dec 11 21:51:10 crc kubenswrapper[4956]: E1211 21:51:10.049279 4956 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 11 21:51:10 crc kubenswrapper[4956]: E1211 21:51:10.049451 4956 kuberuntime_manager.go:1274] "Unhandled Error" err="init 
container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gscp5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-l5286_openshift-marketplace(3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 21:51:10 crc kubenswrapper[4956]: E1211 21:51:10.050874 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-l5286" podUID="3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336" Dec 11 21:51:10 crc kubenswrapper[4956]: E1211 21:51:10.057533 4956 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 11 21:51:10 crc kubenswrapper[4956]: E1211 21:51:10.057689 4956 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cb4ff,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-hmhrj_openshift-marketplace(e0e7dba7-84b8-4ec4-9def-5eb44ac9a523): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 21:51:10 crc kubenswrapper[4956]: E1211 21:51:10.058985 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-hmhrj" podUID="e0e7dba7-84b8-4ec4-9def-5eb44ac9a523" Dec 11 21:51:10 crc kubenswrapper[4956]: I1211 21:51:10.421890 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 11 21:51:10 crc kubenswrapper[4956]: W1211 21:51:10.438029 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podf688b90e_6a1e_4fa8_9e27_74376499eb6b.slice/crio-4edf17a9a14d0af285485447223b90242cd077976f67b609f9ebc0179b927f40 WatchSource:0}: Error finding container 4edf17a9a14d0af285485447223b90242cd077976f67b609f9ebc0179b927f40: Status 404 returned error can't find the container with id 4edf17a9a14d0af285485447223b90242cd077976f67b609f9ebc0179b927f40 Dec 11 21:51:10 crc kubenswrapper[4956]: W1211 21:51:10.441908 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-52b4907230b7e0d027b2680c56c9120e099b33ddc74b4438e7a23d7a5dfcc3c4 WatchSource:0}: Error finding container 52b4907230b7e0d027b2680c56c9120e099b33ddc74b4438e7a23d7a5dfcc3c4: Status 404 returned error can't find the container with id 52b4907230b7e0d027b2680c56c9120e099b33ddc74b4438e7a23d7a5dfcc3c4 Dec 11 21:51:10 crc kubenswrapper[4956]: W1211 21:51:10.496279 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-b8404ba2a1e2da8192dcbee7f345ef424dfac0852aa4e49121ae2901876099f6 WatchSource:0}: Error finding container 
b8404ba2a1e2da8192dcbee7f345ef424dfac0852aa4e49121ae2901876099f6: Status 404 returned error can't find the container with id b8404ba2a1e2da8192dcbee7f345ef424dfac0852aa4e49121ae2901876099f6 Dec 11 21:51:10 crc kubenswrapper[4956]: W1211 21:51:10.534412 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-a84f00ee2a489d105f3649b89421dcbeef04c9b410da7c7b7e263f059a5d6fa0 WatchSource:0}: Error finding container a84f00ee2a489d105f3649b89421dcbeef04c9b410da7c7b7e263f059a5d6fa0: Status 404 returned error can't find the container with id a84f00ee2a489d105f3649b89421dcbeef04c9b410da7c7b7e263f059a5d6fa0 Dec 11 21:51:10 crc kubenswrapper[4956]: I1211 21:51:10.542071 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 11 21:51:10 crc kubenswrapper[4956]: I1211 21:51:10.692670 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"0c0833b6e33480636cfdd528f28eaa167ae8c8b28af9eaa311ce9952b9dc5667"} Dec 11 21:51:10 crc kubenswrapper[4956]: I1211 21:51:10.692993 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"a84f00ee2a489d105f3649b89421dcbeef04c9b410da7c7b7e263f059a5d6fa0"} Dec 11 21:51:10 crc kubenswrapper[4956]: I1211 21:51:10.694306 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f688b90e-6a1e-4fa8-9e27-74376499eb6b","Type":"ContainerStarted","Data":"4edf17a9a14d0af285485447223b90242cd077976f67b609f9ebc0179b927f40"} Dec 11 21:51:10 crc kubenswrapper[4956]: I1211 21:51:10.697800 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"f1ea9764-6b62-49b0-950e-d7bfde1fb6c7","Type":"ContainerStarted","Data":"447eb4603cf8908b29bf663d49ad23819a1e49b632e42a908a3c21ba7673ac11"} Dec 11 21:51:10 crc kubenswrapper[4956]: I1211 21:51:10.700348 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"a387fcbeab4055f2f33309e74afc6f7684440226ce052b7efb910e24be4470db"} Dec 11 21:51:10 crc kubenswrapper[4956]: I1211 21:51:10.700376 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"52b4907230b7e0d027b2680c56c9120e099b33ddc74b4438e7a23d7a5dfcc3c4"} Dec 11 21:51:10 crc kubenswrapper[4956]: I1211 21:51:10.700557 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:51:10 crc kubenswrapper[4956]: I1211 21:51:10.703139 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"10df474cf7f3fff934a2bcaa7157e859d27da56d1cd3a94d7130cc0e4fbf8fb0"} Dec 11 21:51:10 crc kubenswrapper[4956]: I1211 21:51:10.703180 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"b8404ba2a1e2da8192dcbee7f345ef424dfac0852aa4e49121ae2901876099f6"} Dec 11 21:51:10 crc kubenswrapper[4956]: E1211 21:51:10.704791 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-hmhrj" podUID="e0e7dba7-84b8-4ec4-9def-5eb44ac9a523" Dec 11 21:51:10 crc kubenswrapper[4956]: E1211 21:51:10.704859 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-l5286" podUID="3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336" Dec 11 21:51:11 crc kubenswrapper[4956]: I1211 21:51:11.711452 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f688b90e-6a1e-4fa8-9e27-74376499eb6b","Type":"ContainerStarted","Data":"830e0de768e749512b91e67641bbeb5fb1bacdf9e9c9e2254ebbb38f4e58cde6"} Dec 11 21:51:11 crc kubenswrapper[4956]: I1211 21:51:11.714384 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"f1ea9764-6b62-49b0-950e-d7bfde1fb6c7","Type":"ContainerStarted","Data":"b022cc3de8fa866a99237c3aa3823b504ab115c3bcecdcb6e97a32492a115d45"} Dec 11 21:51:11 crc kubenswrapper[4956]: I1211 21:51:11.743261 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=16.7432476 podStartE2EDuration="16.7432476s" podCreationTimestamp="2025-12-11 21:50:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:51:11.728112907 +0000 UTC m=+164.172491057" watchObservedRunningTime="2025-12-11 21:51:11.7432476 +0000 UTC m=+164.187625750" Dec 11 21:51:12 crc kubenswrapper[4956]: I1211 21:51:12.721058 4956 generic.go:334] "Generic (PLEG): container finished" podID="f1ea9764-6b62-49b0-950e-d7bfde1fb6c7" containerID="b022cc3de8fa866a99237c3aa3823b504ab115c3bcecdcb6e97a32492a115d45" exitCode=0 Dec 11 21:51:12 crc kubenswrapper[4956]: I1211 21:51:12.721141 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"f1ea9764-6b62-49b0-950e-d7bfde1fb6c7","Type":"ContainerDied","Data":"b022cc3de8fa866a99237c3aa3823b504ab115c3bcecdcb6e97a32492a115d45"} Dec 11 21:51:13 crc kubenswrapper[4956]: I1211 21:51:13.966596 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 21:51:14 crc kubenswrapper[4956]: I1211 21:51:14.071299 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f1ea9764-6b62-49b0-950e-d7bfde1fb6c7-kubelet-dir\") pod \"f1ea9764-6b62-49b0-950e-d7bfde1fb6c7\" (UID: \"f1ea9764-6b62-49b0-950e-d7bfde1fb6c7\") " Dec 11 21:51:14 crc kubenswrapper[4956]: I1211 21:51:14.071361 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f1ea9764-6b62-49b0-950e-d7bfde1fb6c7-kube-api-access\") pod \"f1ea9764-6b62-49b0-950e-d7bfde1fb6c7\" (UID: \"f1ea9764-6b62-49b0-950e-d7bfde1fb6c7\") " Dec 11 21:51:14 crc kubenswrapper[4956]: I1211 21:51:14.071695 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f1ea9764-6b62-49b0-950e-d7bfde1fb6c7-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f1ea9764-6b62-49b0-950e-d7bfde1fb6c7" (UID: "f1ea9764-6b62-49b0-950e-d7bfde1fb6c7"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 21:51:14 crc kubenswrapper[4956]: I1211 21:51:14.071870 4956 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f1ea9764-6b62-49b0-950e-d7bfde1fb6c7-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 11 21:51:14 crc kubenswrapper[4956]: I1211 21:51:14.081498 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1ea9764-6b62-49b0-950e-d7bfde1fb6c7-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f1ea9764-6b62-49b0-950e-d7bfde1fb6c7" (UID: "f1ea9764-6b62-49b0-950e-d7bfde1fb6c7"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:51:14 crc kubenswrapper[4956]: I1211 21:51:14.173173 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f1ea9764-6b62-49b0-950e-d7bfde1fb6c7-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 21:51:14 crc kubenswrapper[4956]: I1211 21:51:14.733012 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"f1ea9764-6b62-49b0-950e-d7bfde1fb6c7","Type":"ContainerDied","Data":"447eb4603cf8908b29bf663d49ad23819a1e49b632e42a908a3c21ba7673ac11"} Dec 11 21:51:14 crc kubenswrapper[4956]: I1211 21:51:14.733056 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="447eb4603cf8908b29bf663d49ad23819a1e49b632e42a908a3c21ba7673ac11" Dec 11 21:51:14 crc kubenswrapper[4956]: I1211 21:51:14.733136 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 21:51:16 crc kubenswrapper[4956]: I1211 21:51:16.888427 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 21:51:16 crc kubenswrapper[4956]: I1211 21:51:16.888825 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 21:51:29 crc kubenswrapper[4956]: I1211 21:51:29.860383 4956 generic.go:334] "Generic (PLEG): container finished" podID="de7b66f2-e73f-46ad-80da-a2c834e099c6" containerID="452a0f3220980363d3b38eea119a1b4619b5575284477210ee4ca62929157c76" exitCode=0 Dec 11 21:51:29 crc kubenswrapper[4956]: I1211 21:51:29.860438 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkbh9" event={"ID":"de7b66f2-e73f-46ad-80da-a2c834e099c6","Type":"ContainerDied","Data":"452a0f3220980363d3b38eea119a1b4619b5575284477210ee4ca62929157c76"} Dec 11 21:51:29 crc kubenswrapper[4956]: I1211 21:51:29.864086 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tvxwj" event={"ID":"e87ecc79-efd6-4f8c-859b-4c527eaf0225","Type":"ContainerStarted","Data":"c52d39074cc0c6605380b7da7ef7f02a6e38b53f91ce465ef1f381052af80017"} Dec 11 21:51:29 crc kubenswrapper[4956]: I1211 21:51:29.865960 4956 generic.go:334] "Generic (PLEG): container finished" podID="aa022a30-6487-45c0-82b7-336a05167918" containerID="e5b084efce5e1c164208abf5b6fe5b0103b19bbe4e1343c802aeeece29884836" exitCode=0 Dec 11 21:51:29 crc kubenswrapper[4956]: I1211 21:51:29.866044 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bn9xr" event={"ID":"aa022a30-6487-45c0-82b7-336a05167918","Type":"ContainerDied","Data":"e5b084efce5e1c164208abf5b6fe5b0103b19bbe4e1343c802aeeece29884836"} Dec 11 21:51:29 crc kubenswrapper[4956]: I1211 21:51:29.881263 4956 generic.go:334] "Generic (PLEG): container finished" podID="e0e7dba7-84b8-4ec4-9def-5eb44ac9a523" containerID="30d281b059e9fe33c1446ea8f9098753347f01e3980a00fa2f52fd7aefabde95" exitCode=0 Dec 11 21:51:29 crc kubenswrapper[4956]: I1211 21:51:29.881323 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hmhrj" event={"ID":"e0e7dba7-84b8-4ec4-9def-5eb44ac9a523","Type":"ContainerDied","Data":"30d281b059e9fe33c1446ea8f9098753347f01e3980a00fa2f52fd7aefabde95"} Dec 11 21:51:29 crc kubenswrapper[4956]: I1211 21:51:29.885536 4956 generic.go:334] "Generic (PLEG): container finished" podID="48520909-a6cd-4ec4-a6db-35a778505823" containerID="1cb114b799a9b35da1d1cc8c6141c02fcf639b3a46a52fc220e34558c6f4656e" exitCode=0 Dec 11 21:51:29 crc kubenswrapper[4956]: I1211 21:51:29.885612 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5khrp" event={"ID":"48520909-a6cd-4ec4-a6db-35a778505823","Type":"ContainerDied","Data":"1cb114b799a9b35da1d1cc8c6141c02fcf639b3a46a52fc220e34558c6f4656e"} Dec 11 21:51:29 crc kubenswrapper[4956]: I1211 21:51:29.895654 4956 generic.go:334] "Generic (PLEG): 
container finished" podID="3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336" containerID="c599b95c1217e839f0a604c544e7e0ca3c5834eb49d5773d41908988dd824e0c" exitCode=0 Dec 11 21:51:29 crc kubenswrapper[4956]: I1211 21:51:29.895747 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l5286" event={"ID":"3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336","Type":"ContainerDied","Data":"c599b95c1217e839f0a604c544e7e0ca3c5834eb49d5773d41908988dd824e0c"} Dec 11 21:51:29 crc kubenswrapper[4956]: I1211 21:51:29.905139 4956 generic.go:334] "Generic (PLEG): container finished" podID="303d970a-e04c-4ba1-a0da-dd21716371e9" containerID="13bc473f141257db02588b5ec0b8f13fe7311668995e28ae097509d43c496fae" exitCode=0 Dec 11 21:51:29 crc kubenswrapper[4956]: I1211 21:51:29.905193 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rdx2c" event={"ID":"303d970a-e04c-4ba1-a0da-dd21716371e9","Type":"ContainerDied","Data":"13bc473f141257db02588b5ec0b8f13fe7311668995e28ae097509d43c496fae"} Dec 11 21:51:30 crc kubenswrapper[4956]: I1211 21:51:30.912623 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l5286" event={"ID":"3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336","Type":"ContainerStarted","Data":"c3fed00e13fa0add010ec9560c6db643ca46e509c24b073f0fc87f79ed7cc9ef"} Dec 11 21:51:30 crc kubenswrapper[4956]: I1211 21:51:30.918455 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rdx2c" event={"ID":"303d970a-e04c-4ba1-a0da-dd21716371e9","Type":"ContainerStarted","Data":"9335a66dd2a7734e785def5780e2f5dfebb91dabc8af29947f781320446be01d"} Dec 11 21:51:30 crc kubenswrapper[4956]: I1211 21:51:30.920345 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-46xqx" event={"ID":"46394f6a-9e6f-49f8-a879-1753789c4ba0","Type":"ContainerStarted","Data":"1ee7042c64ca84873bb5bdffc706449bb401c9212ef9926bf6eb8a1e6210b1e6"} Dec 11 21:51:30 crc kubenswrapper[4956]: I1211 21:51:30.924599 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkbh9" event={"ID":"de7b66f2-e73f-46ad-80da-a2c834e099c6","Type":"ContainerStarted","Data":"8ddde538e38cdbaf4724a4fea8d1288d968320cac3f99704c71f0399e2dd4113"} Dec 11 21:51:30 crc kubenswrapper[4956]: I1211 21:51:30.926794 4956 generic.go:334] "Generic (PLEG): container finished" podID="e87ecc79-efd6-4f8c-859b-4c527eaf0225" containerID="c52d39074cc0c6605380b7da7ef7f02a6e38b53f91ce465ef1f381052af80017" exitCode=0 Dec 11 21:51:30 crc kubenswrapper[4956]: I1211 21:51:30.926862 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tvxwj" event={"ID":"e87ecc79-efd6-4f8c-859b-4c527eaf0225","Type":"ContainerDied","Data":"c52d39074cc0c6605380b7da7ef7f02a6e38b53f91ce465ef1f381052af80017"} Dec 11 21:51:30 crc kubenswrapper[4956]: I1211 21:51:30.928992 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bn9xr" event={"ID":"aa022a30-6487-45c0-82b7-336a05167918","Type":"ContainerStarted","Data":"5f5b3e7a1aeebd57d07d9847751f28ecaf9a69ffe563c58100158050e3ec5f74"} Dec 11 21:51:30 crc kubenswrapper[4956]: I1211 21:51:30.933263 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hmhrj" 
event={"ID":"e0e7dba7-84b8-4ec4-9def-5eb44ac9a523","Type":"ContainerStarted","Data":"179a068c34a95783cb10f4dd4f07c8804026fc46f3bd5107b59bd26e89e4d41c"} Dec 11 21:51:30 crc kubenswrapper[4956]: I1211 21:51:30.934989 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-l5286" podStartSLOduration=3.481177303 podStartE2EDuration="1m19.934978005s" podCreationTimestamp="2025-12-11 21:50:11 +0000 UTC" firstStartedPulling="2025-12-11 21:50:14.128514185 +0000 UTC m=+106.572892335" lastFinishedPulling="2025-12-11 21:51:30.582314887 +0000 UTC m=+183.026693037" observedRunningTime="2025-12-11 21:51:30.930958195 +0000 UTC m=+183.375336375" watchObservedRunningTime="2025-12-11 21:51:30.934978005 +0000 UTC m=+183.379356155" Dec 11 21:51:30 crc kubenswrapper[4956]: I1211 21:51:30.953831 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jkbh9" podStartSLOduration=3.504981162 podStartE2EDuration="1m19.953808848s" podCreationTimestamp="2025-12-11 21:50:11 +0000 UTC" firstStartedPulling="2025-12-11 21:50:14.063735739 +0000 UTC m=+106.508113889" lastFinishedPulling="2025-12-11 21:51:30.512563425 +0000 UTC m=+182.956941575" observedRunningTime="2025-12-11 21:51:30.952261746 +0000 UTC m=+183.396639906" watchObservedRunningTime="2025-12-11 21:51:30.953808848 +0000 UTC m=+183.398186998" Dec 11 21:51:30 crc kubenswrapper[4956]: I1211 21:51:30.971925 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bn9xr" podStartSLOduration=3.68857228 podStartE2EDuration="1m19.971908212s" podCreationTimestamp="2025-12-11 21:50:11 +0000 UTC" firstStartedPulling="2025-12-11 21:50:14.075965793 +0000 UTC m=+106.520343943" lastFinishedPulling="2025-12-11 21:51:30.359301725 +0000 UTC m=+182.803679875" observedRunningTime="2025-12-11 21:51:30.969852045 +0000 UTC m=+183.414230205" watchObservedRunningTime="2025-12-11 21:51:30.971908212 +0000 UTC m=+183.416286362" Dec 11 21:51:31 crc kubenswrapper[4956]: I1211 21:51:31.037292 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rdx2c" podStartSLOduration=2.676007813 podStartE2EDuration="1m18.037253734s" podCreationTimestamp="2025-12-11 21:50:13 +0000 UTC" firstStartedPulling="2025-12-11 21:50:15.133285722 +0000 UTC m=+107.577663872" lastFinishedPulling="2025-12-11 21:51:30.494531623 +0000 UTC m=+182.938909793" observedRunningTime="2025-12-11 21:51:31.015251884 +0000 UTC m=+183.459630064" watchObservedRunningTime="2025-12-11 21:51:31.037253734 +0000 UTC m=+183.481631904" Dec 11 21:51:31 crc kubenswrapper[4956]: I1211 21:51:31.058856 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hmhrj" podStartSLOduration=3.738098299 podStartE2EDuration="1m20.058834242s" podCreationTimestamp="2025-12-11 21:50:11 +0000 UTC" firstStartedPulling="2025-12-11 21:50:14.089742908 +0000 UTC m=+106.534121058" lastFinishedPulling="2025-12-11 21:51:30.410478851 +0000 UTC m=+182.854857001" observedRunningTime="2025-12-11 21:51:31.055176482 +0000 UTC m=+183.499554632" watchObservedRunningTime="2025-12-11 21:51:31.058834242 +0000 UTC m=+183.503212392" Dec 11 21:51:31 crc kubenswrapper[4956]: I1211 21:51:31.565613 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-l5286" Dec 11 21:51:31 crc kubenswrapper[4956]: 
I1211 21:51:31.565989 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-l5286" Dec 11 21:51:31 crc kubenswrapper[4956]: I1211 21:51:31.818356 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bn9xr" Dec 11 21:51:31 crc kubenswrapper[4956]: I1211 21:51:31.818420 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bn9xr" Dec 11 21:51:31 crc kubenswrapper[4956]: I1211 21:51:31.945730 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tvxwj" event={"ID":"e87ecc79-efd6-4f8c-859b-4c527eaf0225","Type":"ContainerStarted","Data":"dfefaaf566cfcc646526855f975120112b68e499ea8c501017b68f6d1fac8eb7"} Dec 11 21:51:31 crc kubenswrapper[4956]: I1211 21:51:31.948601 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5khrp" event={"ID":"48520909-a6cd-4ec4-a6db-35a778505823","Type":"ContainerStarted","Data":"4716eb3ec77e80bbc9b606ab179098b203d857090cc60852eedd5d42d7ca493d"} Dec 11 21:51:31 crc kubenswrapper[4956]: I1211 21:51:31.950761 4956 generic.go:334] "Generic (PLEG): container finished" podID="46394f6a-9e6f-49f8-a879-1753789c4ba0" containerID="1ee7042c64ca84873bb5bdffc706449bb401c9212ef9926bf6eb8a1e6210b1e6" exitCode=0 Dec 11 21:51:31 crc kubenswrapper[4956]: I1211 21:51:31.950858 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-46xqx" event={"ID":"46394f6a-9e6f-49f8-a879-1753789c4ba0","Type":"ContainerDied","Data":"1ee7042c64ca84873bb5bdffc706449bb401c9212ef9926bf6eb8a1e6210b1e6"} Dec 11 21:51:31 crc kubenswrapper[4956]: I1211 21:51:31.977575 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tvxwj" podStartSLOduration=3.634181839 podStartE2EDuration="1m17.977554036s" podCreationTimestamp="2025-12-11 21:50:14 +0000 UTC" firstStartedPulling="2025-12-11 21:50:17.241201092 +0000 UTC m=+109.685579242" lastFinishedPulling="2025-12-11 21:51:31.584573289 +0000 UTC m=+184.028951439" observedRunningTime="2025-12-11 21:51:31.97548798 +0000 UTC m=+184.419866140" watchObservedRunningTime="2025-12-11 21:51:31.977554036 +0000 UTC m=+184.421932186" Dec 11 21:51:32 crc kubenswrapper[4956]: I1211 21:51:32.020418 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5khrp" podStartSLOduration=3.288011283 podStartE2EDuration="1m19.020399555s" podCreationTimestamp="2025-12-11 21:50:13 +0000 UTC" firstStartedPulling="2025-12-11 21:50:15.14564078 +0000 UTC m=+107.590018930" lastFinishedPulling="2025-12-11 21:51:30.878029052 +0000 UTC m=+183.322407202" observedRunningTime="2025-12-11 21:51:32.020035364 +0000 UTC m=+184.464413524" watchObservedRunningTime="2025-12-11 21:51:32.020399555 +0000 UTC m=+184.464777705" Dec 11 21:51:32 crc kubenswrapper[4956]: I1211 21:51:32.077599 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hmhrj" Dec 11 21:51:32 crc kubenswrapper[4956]: I1211 21:51:32.077717 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hmhrj" Dec 11 21:51:32 crc kubenswrapper[4956]: I1211 21:51:32.264687 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/certified-operators-jkbh9" Dec 11 21:51:32 crc kubenswrapper[4956]: I1211 21:51:32.264759 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jkbh9" Dec 11 21:51:32 crc kubenswrapper[4956]: I1211 21:51:32.625167 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-l5286" podUID="3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336" containerName="registry-server" probeResult="failure" output=< Dec 11 21:51:32 crc kubenswrapper[4956]: timeout: failed to connect service ":50051" within 1s Dec 11 21:51:32 crc kubenswrapper[4956]: > Dec 11 21:51:32 crc kubenswrapper[4956]: I1211 21:51:32.860264 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-bn9xr" podUID="aa022a30-6487-45c0-82b7-336a05167918" containerName="registry-server" probeResult="failure" output=< Dec 11 21:51:32 crc kubenswrapper[4956]: timeout: failed to connect service ":50051" within 1s Dec 11 21:51:32 crc kubenswrapper[4956]: > Dec 11 21:51:32 crc kubenswrapper[4956]: I1211 21:51:32.960110 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-46xqx" event={"ID":"46394f6a-9e6f-49f8-a879-1753789c4ba0","Type":"ContainerStarted","Data":"99678f6232902c257785529f2af5870913f4f166fde8ad1851a1faa18bc17b13"} Dec 11 21:51:32 crc kubenswrapper[4956]: I1211 21:51:32.984746 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-46xqx" podStartSLOduration=2.708045926 podStartE2EDuration="1m18.984726732s" podCreationTimestamp="2025-12-11 21:50:14 +0000 UTC" firstStartedPulling="2025-12-11 21:50:16.169956413 +0000 UTC m=+108.614334563" lastFinishedPulling="2025-12-11 21:51:32.446637209 +0000 UTC m=+184.891015369" observedRunningTime="2025-12-11 21:51:32.981047963 +0000 UTC m=+185.425426133" watchObservedRunningTime="2025-12-11 21:51:32.984726732 +0000 UTC m=+185.429104892" Dec 11 21:51:33 crc kubenswrapper[4956]: I1211 21:51:33.114686 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-hmhrj" podUID="e0e7dba7-84b8-4ec4-9def-5eb44ac9a523" containerName="registry-server" probeResult="failure" output=< Dec 11 21:51:33 crc kubenswrapper[4956]: timeout: failed to connect service ":50051" within 1s Dec 11 21:51:33 crc kubenswrapper[4956]: > Dec 11 21:51:33 crc kubenswrapper[4956]: I1211 21:51:33.300070 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-jkbh9" podUID="de7b66f2-e73f-46ad-80da-a2c834e099c6" containerName="registry-server" probeResult="failure" output=< Dec 11 21:51:33 crc kubenswrapper[4956]: timeout: failed to connect service ":50051" within 1s Dec 11 21:51:33 crc kubenswrapper[4956]: > Dec 11 21:51:33 crc kubenswrapper[4956]: I1211 21:51:33.811963 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5khrp" Dec 11 21:51:33 crc kubenswrapper[4956]: I1211 21:51:33.812018 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5khrp" Dec 11 21:51:33 crc kubenswrapper[4956]: I1211 21:51:33.857480 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5khrp" Dec 11 21:51:34 crc kubenswrapper[4956]: I1211 21:51:34.261631 4956 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rdx2c" Dec 11 21:51:34 crc kubenswrapper[4956]: I1211 21:51:34.261839 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rdx2c" Dec 11 21:51:34 crc kubenswrapper[4956]: I1211 21:51:34.300864 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rdx2c" Dec 11 21:51:34 crc kubenswrapper[4956]: I1211 21:51:34.804957 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-46xqx" Dec 11 21:51:34 crc kubenswrapper[4956]: I1211 21:51:34.805372 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-46xqx" Dec 11 21:51:35 crc kubenswrapper[4956]: I1211 21:51:35.181849 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tvxwj" Dec 11 21:51:35 crc kubenswrapper[4956]: I1211 21:51:35.181950 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tvxwj" Dec 11 21:51:35 crc kubenswrapper[4956]: I1211 21:51:35.843028 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-46xqx" podUID="46394f6a-9e6f-49f8-a879-1753789c4ba0" containerName="registry-server" probeResult="failure" output=< Dec 11 21:51:35 crc kubenswrapper[4956]: timeout: failed to connect service ":50051" within 1s Dec 11 21:51:35 crc kubenswrapper[4956]: > Dec 11 21:51:36 crc kubenswrapper[4956]: I1211 21:51:36.030268 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rdx2c" Dec 11 21:51:36 crc kubenswrapper[4956]: I1211 21:51:36.230832 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-tvxwj" podUID="e87ecc79-efd6-4f8c-859b-4c527eaf0225" containerName="registry-server" probeResult="failure" output=< Dec 11 21:51:36 crc kubenswrapper[4956]: timeout: failed to connect service ":50051" within 1s Dec 11 21:51:36 crc kubenswrapper[4956]: > Dec 11 21:51:37 crc kubenswrapper[4956]: I1211 21:51:37.093606 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rdx2c"] Dec 11 21:51:37 crc kubenswrapper[4956]: I1211 21:51:37.990251 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rdx2c" podUID="303d970a-e04c-4ba1-a0da-dd21716371e9" containerName="registry-server" containerID="cri-o://9335a66dd2a7734e785def5780e2f5dfebb91dabc8af29947f781320446be01d" gracePeriod=2 Dec 11 21:51:40 crc kubenswrapper[4956]: I1211 21:51:40.003682 4956 generic.go:334] "Generic (PLEG): container finished" podID="303d970a-e04c-4ba1-a0da-dd21716371e9" containerID="9335a66dd2a7734e785def5780e2f5dfebb91dabc8af29947f781320446be01d" exitCode=0 Dec 11 21:51:40 crc kubenswrapper[4956]: I1211 21:51:40.003737 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rdx2c" event={"ID":"303d970a-e04c-4ba1-a0da-dd21716371e9","Type":"ContainerDied","Data":"9335a66dd2a7734e785def5780e2f5dfebb91dabc8af29947f781320446be01d"} Dec 11 21:51:41 crc kubenswrapper[4956]: I1211 21:51:41.616144 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-l5286" Dec 11 
21:51:41 crc kubenswrapper[4956]: I1211 21:51:41.665968 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-l5286" Dec 11 21:51:41 crc kubenswrapper[4956]: I1211 21:51:41.850448 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bn9xr" Dec 11 21:51:41 crc kubenswrapper[4956]: I1211 21:51:41.891221 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bn9xr" Dec 11 21:51:42 crc kubenswrapper[4956]: I1211 21:51:42.116900 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hmhrj" Dec 11 21:51:42 crc kubenswrapper[4956]: I1211 21:51:42.170219 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hmhrj" Dec 11 21:51:42 crc kubenswrapper[4956]: I1211 21:51:42.299148 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jkbh9" Dec 11 21:51:42 crc kubenswrapper[4956]: I1211 21:51:42.341461 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jkbh9" Dec 11 21:51:42 crc kubenswrapper[4956]: I1211 21:51:42.354550 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rdx2c" Dec 11 21:51:42 crc kubenswrapper[4956]: I1211 21:51:42.450457 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qzqw5\" (UniqueName: \"kubernetes.io/projected/303d970a-e04c-4ba1-a0da-dd21716371e9-kube-api-access-qzqw5\") pod \"303d970a-e04c-4ba1-a0da-dd21716371e9\" (UID: \"303d970a-e04c-4ba1-a0da-dd21716371e9\") " Dec 11 21:51:42 crc kubenswrapper[4956]: I1211 21:51:42.450515 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/303d970a-e04c-4ba1-a0da-dd21716371e9-utilities\") pod \"303d970a-e04c-4ba1-a0da-dd21716371e9\" (UID: \"303d970a-e04c-4ba1-a0da-dd21716371e9\") " Dec 11 21:51:42 crc kubenswrapper[4956]: I1211 21:51:42.450572 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/303d970a-e04c-4ba1-a0da-dd21716371e9-catalog-content\") pod \"303d970a-e04c-4ba1-a0da-dd21716371e9\" (UID: \"303d970a-e04c-4ba1-a0da-dd21716371e9\") " Dec 11 21:51:42 crc kubenswrapper[4956]: I1211 21:51:42.451300 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/303d970a-e04c-4ba1-a0da-dd21716371e9-utilities" (OuterVolumeSpecName: "utilities") pod "303d970a-e04c-4ba1-a0da-dd21716371e9" (UID: "303d970a-e04c-4ba1-a0da-dd21716371e9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 21:51:42 crc kubenswrapper[4956]: I1211 21:51:42.457936 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/303d970a-e04c-4ba1-a0da-dd21716371e9-kube-api-access-qzqw5" (OuterVolumeSpecName: "kube-api-access-qzqw5") pod "303d970a-e04c-4ba1-a0da-dd21716371e9" (UID: "303d970a-e04c-4ba1-a0da-dd21716371e9"). InnerVolumeSpecName "kube-api-access-qzqw5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:51:42 crc kubenswrapper[4956]: I1211 21:51:42.470888 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/303d970a-e04c-4ba1-a0da-dd21716371e9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "303d970a-e04c-4ba1-a0da-dd21716371e9" (UID: "303d970a-e04c-4ba1-a0da-dd21716371e9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 21:51:42 crc kubenswrapper[4956]: I1211 21:51:42.552048 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/303d970a-e04c-4ba1-a0da-dd21716371e9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 21:51:42 crc kubenswrapper[4956]: I1211 21:51:42.552081 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qzqw5\" (UniqueName: \"kubernetes.io/projected/303d970a-e04c-4ba1-a0da-dd21716371e9-kube-api-access-qzqw5\") on node \"crc\" DevicePath \"\"" Dec 11 21:51:42 crc kubenswrapper[4956]: I1211 21:51:42.552093 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/303d970a-e04c-4ba1-a0da-dd21716371e9-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 21:51:43 crc kubenswrapper[4956]: I1211 21:51:43.027013 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rdx2c" Dec 11 21:51:43 crc kubenswrapper[4956]: I1211 21:51:43.026965 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rdx2c" event={"ID":"303d970a-e04c-4ba1-a0da-dd21716371e9","Type":"ContainerDied","Data":"1b93cf5e5505f08dd4010de4d246dd27e808035f55ef37226c5896dcc4bae1a6"} Dec 11 21:51:43 crc kubenswrapper[4956]: I1211 21:51:43.027177 4956 scope.go:117] "RemoveContainer" containerID="9335a66dd2a7734e785def5780e2f5dfebb91dabc8af29947f781320446be01d" Dec 11 21:51:43 crc kubenswrapper[4956]: I1211 21:51:43.055793 4956 scope.go:117] "RemoveContainer" containerID="13bc473f141257db02588b5ec0b8f13fe7311668995e28ae097509d43c496fae" Dec 11 21:51:43 crc kubenswrapper[4956]: I1211 21:51:43.079277 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rdx2c"] Dec 11 21:51:43 crc kubenswrapper[4956]: I1211 21:51:43.083207 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rdx2c"] Dec 11 21:51:43 crc kubenswrapper[4956]: I1211 21:51:43.084944 4956 scope.go:117] "RemoveContainer" containerID="64e0b1b6435e2c9669c0c7f0791290e825c64fb1c6471df4358278e38232a41c" Dec 11 21:51:43 crc kubenswrapper[4956]: I1211 21:51:43.654492 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hmhrj"] Dec 11 21:51:43 crc kubenswrapper[4956]: I1211 21:51:43.896030 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5khrp" Dec 11 21:51:44 crc kubenswrapper[4956]: I1211 21:51:44.029885 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="303d970a-e04c-4ba1-a0da-dd21716371e9" path="/var/lib/kubelet/pods/303d970a-e04c-4ba1-a0da-dd21716371e9/volumes" Dec 11 21:51:44 crc kubenswrapper[4956]: I1211 21:51:44.033712 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hmhrj" 
podUID="e0e7dba7-84b8-4ec4-9def-5eb44ac9a523" containerName="registry-server" containerID="cri-o://179a068c34a95783cb10f4dd4f07c8804026fc46f3bd5107b59bd26e89e4d41c" gracePeriod=2 Dec 11 21:51:44 crc kubenswrapper[4956]: I1211 21:51:44.252346 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jkbh9"] Dec 11 21:51:44 crc kubenswrapper[4956]: I1211 21:51:44.252592 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jkbh9" podUID="de7b66f2-e73f-46ad-80da-a2c834e099c6" containerName="registry-server" containerID="cri-o://8ddde538e38cdbaf4724a4fea8d1288d968320cac3f99704c71f0399e2dd4113" gracePeriod=2 Dec 11 21:51:44 crc kubenswrapper[4956]: I1211 21:51:44.879589 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-46xqx" Dec 11 21:51:44 crc kubenswrapper[4956]: I1211 21:51:44.888529 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hq2jl"] Dec 11 21:51:44 crc kubenswrapper[4956]: I1211 21:51:44.949632 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-46xqx" Dec 11 21:51:45 crc kubenswrapper[4956]: I1211 21:51:45.040172 4956 generic.go:334] "Generic (PLEG): container finished" podID="de7b66f2-e73f-46ad-80da-a2c834e099c6" containerID="8ddde538e38cdbaf4724a4fea8d1288d968320cac3f99704c71f0399e2dd4113" exitCode=0 Dec 11 21:51:45 crc kubenswrapper[4956]: I1211 21:51:45.040254 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkbh9" event={"ID":"de7b66f2-e73f-46ad-80da-a2c834e099c6","Type":"ContainerDied","Data":"8ddde538e38cdbaf4724a4fea8d1288d968320cac3f99704c71f0399e2dd4113"} Dec 11 21:51:45 crc kubenswrapper[4956]: I1211 21:51:45.219301 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tvxwj" Dec 11 21:51:45 crc kubenswrapper[4956]: I1211 21:51:45.275429 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tvxwj" Dec 11 21:51:45 crc kubenswrapper[4956]: I1211 21:51:45.549023 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 21:51:45 crc kubenswrapper[4956]: I1211 21:51:45.890264 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jkbh9" Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.001494 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de7b66f2-e73f-46ad-80da-a2c834e099c6-catalog-content\") pod \"de7b66f2-e73f-46ad-80da-a2c834e099c6\" (UID: \"de7b66f2-e73f-46ad-80da-a2c834e099c6\") " Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.001550 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vrjcl\" (UniqueName: \"kubernetes.io/projected/de7b66f2-e73f-46ad-80da-a2c834e099c6-kube-api-access-vrjcl\") pod \"de7b66f2-e73f-46ad-80da-a2c834e099c6\" (UID: \"de7b66f2-e73f-46ad-80da-a2c834e099c6\") " Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.001592 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de7b66f2-e73f-46ad-80da-a2c834e099c6-utilities\") pod \"de7b66f2-e73f-46ad-80da-a2c834e099c6\" (UID: \"de7b66f2-e73f-46ad-80da-a2c834e099c6\") " Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.002676 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de7b66f2-e73f-46ad-80da-a2c834e099c6-utilities" (OuterVolumeSpecName: "utilities") pod "de7b66f2-e73f-46ad-80da-a2c834e099c6" (UID: "de7b66f2-e73f-46ad-80da-a2c834e099c6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.007166 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de7b66f2-e73f-46ad-80da-a2c834e099c6-kube-api-access-vrjcl" (OuterVolumeSpecName: "kube-api-access-vrjcl") pod "de7b66f2-e73f-46ad-80da-a2c834e099c6" (UID: "de7b66f2-e73f-46ad-80da-a2c834e099c6"). InnerVolumeSpecName "kube-api-access-vrjcl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.047829 4956 generic.go:334] "Generic (PLEG): container finished" podID="e0e7dba7-84b8-4ec4-9def-5eb44ac9a523" containerID="179a068c34a95783cb10f4dd4f07c8804026fc46f3bd5107b59bd26e89e4d41c" exitCode=0 Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.050043 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jkbh9" Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.056588 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de7b66f2-e73f-46ad-80da-a2c834e099c6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "de7b66f2-e73f-46ad-80da-a2c834e099c6" (UID: "de7b66f2-e73f-46ad-80da-a2c834e099c6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.060819 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hmhrj" event={"ID":"e0e7dba7-84b8-4ec4-9def-5eb44ac9a523","Type":"ContainerDied","Data":"179a068c34a95783cb10f4dd4f07c8804026fc46f3bd5107b59bd26e89e4d41c"} Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.060857 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hmhrj" event={"ID":"e0e7dba7-84b8-4ec4-9def-5eb44ac9a523","Type":"ContainerDied","Data":"498377653e053befbfac82ec21a2f61592f13a059bb3110e6fb50d63e0e59c42"} Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.060867 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="498377653e053befbfac82ec21a2f61592f13a059bb3110e6fb50d63e0e59c42" Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.060877 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jkbh9" event={"ID":"de7b66f2-e73f-46ad-80da-a2c834e099c6","Type":"ContainerDied","Data":"afb9c1af17d3a04800f1f7073a4f5f032094d475a3d0f50ac9dab1c28179a486"} Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.060904 4956 scope.go:117] "RemoveContainer" containerID="8ddde538e38cdbaf4724a4fea8d1288d968320cac3f99704c71f0399e2dd4113" Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.067230 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hmhrj" Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.082890 4956 scope.go:117] "RemoveContainer" containerID="452a0f3220980363d3b38eea119a1b4619b5575284477210ee4ca62929157c76" Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.102287 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0e7dba7-84b8-4ec4-9def-5eb44ac9a523-catalog-content\") pod \"e0e7dba7-84b8-4ec4-9def-5eb44ac9a523\" (UID: \"e0e7dba7-84b8-4ec4-9def-5eb44ac9a523\") " Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.102345 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0e7dba7-84b8-4ec4-9def-5eb44ac9a523-utilities\") pod \"e0e7dba7-84b8-4ec4-9def-5eb44ac9a523\" (UID: \"e0e7dba7-84b8-4ec4-9def-5eb44ac9a523\") " Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.102418 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cb4ff\" (UniqueName: \"kubernetes.io/projected/e0e7dba7-84b8-4ec4-9def-5eb44ac9a523-kube-api-access-cb4ff\") pod \"e0e7dba7-84b8-4ec4-9def-5eb44ac9a523\" (UID: \"e0e7dba7-84b8-4ec4-9def-5eb44ac9a523\") " Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.102627 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de7b66f2-e73f-46ad-80da-a2c834e099c6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.102643 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vrjcl\" (UniqueName: \"kubernetes.io/projected/de7b66f2-e73f-46ad-80da-a2c834e099c6-kube-api-access-vrjcl\") on node \"crc\" DevicePath \"\"" Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.102653 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/de7b66f2-e73f-46ad-80da-a2c834e099c6-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.104413 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0e7dba7-84b8-4ec4-9def-5eb44ac9a523-utilities" (OuterVolumeSpecName: "utilities") pod "e0e7dba7-84b8-4ec4-9def-5eb44ac9a523" (UID: "e0e7dba7-84b8-4ec4-9def-5eb44ac9a523"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.104826 4956 scope.go:117] "RemoveContainer" containerID="11153b7226851fdee13041aaf93dba4fa43bc0d95de88ad9e0a1b98321cfdae8" Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.105876 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0e7dba7-84b8-4ec4-9def-5eb44ac9a523-kube-api-access-cb4ff" (OuterVolumeSpecName: "kube-api-access-cb4ff") pod "e0e7dba7-84b8-4ec4-9def-5eb44ac9a523" (UID: "e0e7dba7-84b8-4ec4-9def-5eb44ac9a523"). InnerVolumeSpecName "kube-api-access-cb4ff". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.149671 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0e7dba7-84b8-4ec4-9def-5eb44ac9a523-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e0e7dba7-84b8-4ec4-9def-5eb44ac9a523" (UID: "e0e7dba7-84b8-4ec4-9def-5eb44ac9a523"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.203926 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cb4ff\" (UniqueName: \"kubernetes.io/projected/e0e7dba7-84b8-4ec4-9def-5eb44ac9a523-kube-api-access-cb4ff\") on node \"crc\" DevicePath \"\"" Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.203965 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0e7dba7-84b8-4ec4-9def-5eb44ac9a523-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.203982 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0e7dba7-84b8-4ec4-9def-5eb44ac9a523-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.380902 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jkbh9"] Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.388778 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jkbh9"] Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.888360 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 21:51:46 crc kubenswrapper[4956]: I1211 21:51:46.889071 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 21:51:47 
crc kubenswrapper[4956]: I1211 21:51:47.055586 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hmhrj" Dec 11 21:51:47 crc kubenswrapper[4956]: I1211 21:51:47.086253 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hmhrj"] Dec 11 21:51:47 crc kubenswrapper[4956]: I1211 21:51:47.088720 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hmhrj"] Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.031100 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de7b66f2-e73f-46ad-80da-a2c834e099c6" path="/var/lib/kubelet/pods/de7b66f2-e73f-46ad-80da-a2c834e099c6/volumes" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.032027 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0e7dba7-84b8-4ec4-9def-5eb44ac9a523" path="/var/lib/kubelet/pods/e0e7dba7-84b8-4ec4-9def-5eb44ac9a523/volumes" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.451456 4956 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 11 21:51:48 crc kubenswrapper[4956]: E1211 21:51:48.451681 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0e7dba7-84b8-4ec4-9def-5eb44ac9a523" containerName="registry-server" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.451695 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0e7dba7-84b8-4ec4-9def-5eb44ac9a523" containerName="registry-server" Dec 11 21:51:48 crc kubenswrapper[4956]: E1211 21:51:48.451707 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de7b66f2-e73f-46ad-80da-a2c834e099c6" containerName="registry-server" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.451713 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="de7b66f2-e73f-46ad-80da-a2c834e099c6" containerName="registry-server" Dec 11 21:51:48 crc kubenswrapper[4956]: E1211 21:51:48.451723 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de7b66f2-e73f-46ad-80da-a2c834e099c6" containerName="extract-utilities" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.451729 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="de7b66f2-e73f-46ad-80da-a2c834e099c6" containerName="extract-utilities" Dec 11 21:51:48 crc kubenswrapper[4956]: E1211 21:51:48.451740 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0e7dba7-84b8-4ec4-9def-5eb44ac9a523" containerName="extract-utilities" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.451746 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0e7dba7-84b8-4ec4-9def-5eb44ac9a523" containerName="extract-utilities" Dec 11 21:51:48 crc kubenswrapper[4956]: E1211 21:51:48.451753 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="303d970a-e04c-4ba1-a0da-dd21716371e9" containerName="registry-server" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.451758 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="303d970a-e04c-4ba1-a0da-dd21716371e9" containerName="registry-server" Dec 11 21:51:48 crc kubenswrapper[4956]: E1211 21:51:48.451776 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1ea9764-6b62-49b0-950e-d7bfde1fb6c7" containerName="pruner" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.451800 4956 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="f1ea9764-6b62-49b0-950e-d7bfde1fb6c7" containerName="pruner" Dec 11 21:51:48 crc kubenswrapper[4956]: E1211 21:51:48.451806 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="303d970a-e04c-4ba1-a0da-dd21716371e9" containerName="extract-utilities" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.451812 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="303d970a-e04c-4ba1-a0da-dd21716371e9" containerName="extract-utilities" Dec 11 21:51:48 crc kubenswrapper[4956]: E1211 21:51:48.451819 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0e7dba7-84b8-4ec4-9def-5eb44ac9a523" containerName="extract-content" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.451824 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0e7dba7-84b8-4ec4-9def-5eb44ac9a523" containerName="extract-content" Dec 11 21:51:48 crc kubenswrapper[4956]: E1211 21:51:48.451833 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de7b66f2-e73f-46ad-80da-a2c834e099c6" containerName="extract-content" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.451839 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="de7b66f2-e73f-46ad-80da-a2c834e099c6" containerName="extract-content" Dec 11 21:51:48 crc kubenswrapper[4956]: E1211 21:51:48.451848 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="303d970a-e04c-4ba1-a0da-dd21716371e9" containerName="extract-content" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.451854 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="303d970a-e04c-4ba1-a0da-dd21716371e9" containerName="extract-content" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.451943 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="303d970a-e04c-4ba1-a0da-dd21716371e9" containerName="registry-server" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.451952 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1ea9764-6b62-49b0-950e-d7bfde1fb6c7" containerName="pruner" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.451963 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0e7dba7-84b8-4ec4-9def-5eb44ac9a523" containerName="registry-server" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.451972 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="de7b66f2-e73f-46ad-80da-a2c834e099c6" containerName="registry-server" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.452262 4956 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.452562 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.452590 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc" gracePeriod=15 Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.452554 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40" gracePeriod=15 Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.452646 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd" gracePeriod=15 Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.452498 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59" gracePeriod=15 Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.452562 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df" gracePeriod=15 Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.453353 4956 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 11 21:51:48 crc kubenswrapper[4956]: E1211 21:51:48.453537 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.453551 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 11 21:51:48 crc kubenswrapper[4956]: E1211 21:51:48.453567 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.453575 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 11 21:51:48 crc kubenswrapper[4956]: E1211 21:51:48.453585 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.453592 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 11 21:51:48 crc kubenswrapper[4956]: E1211 21:51:48.453602 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 11 21:51:48 crc 
kubenswrapper[4956]: I1211 21:51:48.453608 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 11 21:51:48 crc kubenswrapper[4956]: E1211 21:51:48.453619 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.453627 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 11 21:51:48 crc kubenswrapper[4956]: E1211 21:51:48.453635 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.453642 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 11 21:51:48 crc kubenswrapper[4956]: E1211 21:51:48.453653 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.453660 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.453851 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.453871 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.453896 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.453918 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.453934 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.453951 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.495422 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.534445 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.534553 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.534592 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.534641 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.534728 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.534796 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.534854 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.534885 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.639472 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.639564 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.639860 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.639811 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.639906 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.639965 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.639981 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.640022 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.640056 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.640072 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.640100 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.640115 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" 
(UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.640133 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.640128 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.640192 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.640199 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: I1211 21:51:48.793279 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 21:51:48 crc kubenswrapper[4956]: W1211 21:51:48.810246 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-7cfa9c77b1766e5325ebf6103681092a9b0f1ad0254747985c1dcb72c35bdf29 WatchSource:0}: Error finding container 7cfa9c77b1766e5325ebf6103681092a9b0f1ad0254747985c1dcb72c35bdf29: Status 404 returned error can't find the container with id 7cfa9c77b1766e5325ebf6103681092a9b0f1ad0254747985c1dcb72c35bdf29 Dec 11 21:51:48 crc kubenswrapper[4956]: E1211 21:51:48.813047 4956 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.83:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188047b7d54d48c1 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-11 21:51:48.812433601 +0000 UTC m=+201.256811761,LastTimestamp:2025-12-11 21:51:48.812433601 +0000 UTC m=+201.256811761,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 11 21:51:49 crc kubenswrapper[4956]: I1211 21:51:49.069702 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"7cfa9c77b1766e5325ebf6103681092a9b0f1ad0254747985c1dcb72c35bdf29"} Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.078183 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.080662 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.081731 4956 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc" exitCode=0 Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.081807 4956 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df" exitCode=0 Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.081826 4956 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40" exitCode=0 Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.081843 4956 generic.go:334] "Generic (PLEG): container finished" 
podID="f4b27818a5e8e43d0dc095d08835c792" containerID="11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd" exitCode=2 Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.081856 4956 scope.go:117] "RemoveContainer" containerID="2d5f13ee2a3c539e898bd8592f65b41d5003a23cc4363634a0d33f98594dca41" Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.083684 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"c91b45bf0a17c940cdfea92ffad7a0cc5b89cae07f75a244567ffdb2e9c10ffc"} Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.085111 4956 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.83:6443: connect: connection refused" Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.087974 4956 generic.go:334] "Generic (PLEG): container finished" podID="f688b90e-6a1e-4fa8-9e27-74376499eb6b" containerID="830e0de768e749512b91e67641bbeb5fb1bacdf9e9c9e2254ebbb38f4e58cde6" exitCode=0 Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.088010 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f688b90e-6a1e-4fa8-9e27-74376499eb6b","Type":"ContainerDied","Data":"830e0de768e749512b91e67641bbeb5fb1bacdf9e9c9e2254ebbb38f4e58cde6"} Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.088743 4956 status_manager.go:851] "Failed to get status for pod" podUID="f688b90e-6a1e-4fa8-9e27-74376499eb6b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.83:6443: connect: connection refused" Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.089262 4956 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.83:6443: connect: connection refused" Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.807979 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.809575 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.810267 4956 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.83:6443: connect: connection refused" Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.810677 4956 status_manager.go:851] "Failed to get status for pod" podUID="f688b90e-6a1e-4fa8-9e27-74376499eb6b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.83:6443: connect: connection refused" Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.810974 4956 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.83:6443: connect: connection refused" Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.872027 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.872116 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.872184 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.872251 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.872263 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.872449 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.872456 4956 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.872513 4956 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 11 21:51:50 crc kubenswrapper[4956]: I1211 21:51:50.973437 4956 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.095982 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.097109 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.097130 4956 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59" exitCode=0 Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.097203 4956 scope.go:117] "RemoveContainer" containerID="98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.111538 4956 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.83:6443: connect: connection refused" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.111881 4956 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.83:6443: connect: connection refused" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.112295 4956 status_manager.go:851] "Failed to get status for pod" podUID="f688b90e-6a1e-4fa8-9e27-74376499eb6b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.83:6443: connect: connection refused" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.114740 4956 scope.go:117] "RemoveContainer" containerID="0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.126732 4956 scope.go:117] "RemoveContainer" containerID="3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.139260 4956 scope.go:117] "RemoveContainer" containerID="11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.152919 4956 scope.go:117] "RemoveContainer" 
containerID="8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.170142 4956 scope.go:117] "RemoveContainer" containerID="318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.189590 4956 scope.go:117] "RemoveContainer" containerID="98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc" Dec 11 21:51:51 crc kubenswrapper[4956]: E1211 21:51:51.190181 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\": container with ID starting with 98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc not found: ID does not exist" containerID="98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.190212 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc"} err="failed to get container status \"98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\": rpc error: code = NotFound desc = could not find container \"98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc\": container with ID starting with 98342f9f7793a1fc8515e07b10031d9d23d379d458fadd255cc377f8cf548edc not found: ID does not exist" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.190253 4956 scope.go:117] "RemoveContainer" containerID="0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df" Dec 11 21:51:51 crc kubenswrapper[4956]: E1211 21:51:51.190640 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\": container with ID starting with 0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df not found: ID does not exist" containerID="0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.190682 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df"} err="failed to get container status \"0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\": rpc error: code = NotFound desc = could not find container \"0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df\": container with ID starting with 0a3d5420fc2f31d67ce561906fe4d6f7f60656b2cd98304177dc019647e091df not found: ID does not exist" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.190712 4956 scope.go:117] "RemoveContainer" containerID="3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40" Dec 11 21:51:51 crc kubenswrapper[4956]: E1211 21:51:51.191094 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\": container with ID starting with 3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40 not found: ID does not exist" containerID="3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.191118 4956 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40"} err="failed to get container status \"3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\": rpc error: code = NotFound desc = could not find container \"3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40\": container with ID starting with 3c5c9ceea03970794abcfc5be34c49d414a4962571c657e8cecad00e0ccf7a40 not found: ID does not exist" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.191134 4956 scope.go:117] "RemoveContainer" containerID="11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd" Dec 11 21:51:51 crc kubenswrapper[4956]: E1211 21:51:51.191580 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\": container with ID starting with 11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd not found: ID does not exist" containerID="11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.191612 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd"} err="failed to get container status \"11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\": rpc error: code = NotFound desc = could not find container \"11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd\": container with ID starting with 11384e03da20af510f3b5110db1cdf1b693a76ccca412df31d262ab8157e16bd not found: ID does not exist" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.191632 4956 scope.go:117] "RemoveContainer" containerID="8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59" Dec 11 21:51:51 crc kubenswrapper[4956]: E1211 21:51:51.191974 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\": container with ID starting with 8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59 not found: ID does not exist" containerID="8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.192000 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59"} err="failed to get container status \"8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\": rpc error: code = NotFound desc = could not find container \"8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59\": container with ID starting with 8658ede2e542e51287d61c5cd0293347c7e0174ed32144d4a1d77b5d61f7ee59 not found: ID does not exist" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.192018 4956 scope.go:117] "RemoveContainer" containerID="318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519" Dec 11 21:51:51 crc kubenswrapper[4956]: E1211 21:51:51.192440 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\": container with ID starting with 318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519 not found: ID does not exist" 
containerID="318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.192462 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519"} err="failed to get container status \"318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\": rpc error: code = NotFound desc = could not find container \"318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519\": container with ID starting with 318483ab24403240414fc7d87c7e93495067e3d17ebb0485799ea98c316cd519 not found: ID does not exist" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.337504 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.338077 4956 status_manager.go:851] "Failed to get status for pod" podUID="f688b90e-6a1e-4fa8-9e27-74376499eb6b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.83:6443: connect: connection refused" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.338540 4956 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.83:6443: connect: connection refused" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.338990 4956 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.83:6443: connect: connection refused" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.378333 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f688b90e-6a1e-4fa8-9e27-74376499eb6b-var-lock\") pod \"f688b90e-6a1e-4fa8-9e27-74376499eb6b\" (UID: \"f688b90e-6a1e-4fa8-9e27-74376499eb6b\") " Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.378461 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f688b90e-6a1e-4fa8-9e27-74376499eb6b-kube-api-access\") pod \"f688b90e-6a1e-4fa8-9e27-74376499eb6b\" (UID: \"f688b90e-6a1e-4fa8-9e27-74376499eb6b\") " Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.378560 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f688b90e-6a1e-4fa8-9e27-74376499eb6b-kubelet-dir\") pod \"f688b90e-6a1e-4fa8-9e27-74376499eb6b\" (UID: \"f688b90e-6a1e-4fa8-9e27-74376499eb6b\") " Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.378848 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f688b90e-6a1e-4fa8-9e27-74376499eb6b-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f688b90e-6a1e-4fa8-9e27-74376499eb6b" (UID: "f688b90e-6a1e-4fa8-9e27-74376499eb6b"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.378881 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f688b90e-6a1e-4fa8-9e27-74376499eb6b-var-lock" (OuterVolumeSpecName: "var-lock") pod "f688b90e-6a1e-4fa8-9e27-74376499eb6b" (UID: "f688b90e-6a1e-4fa8-9e27-74376499eb6b"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.383934 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f688b90e-6a1e-4fa8-9e27-74376499eb6b-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f688b90e-6a1e-4fa8-9e27-74376499eb6b" (UID: "f688b90e-6a1e-4fa8-9e27-74376499eb6b"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.480492 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f688b90e-6a1e-4fa8-9e27-74376499eb6b-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.480870 4956 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f688b90e-6a1e-4fa8-9e27-74376499eb6b-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 11 21:51:51 crc kubenswrapper[4956]: I1211 21:51:51.480883 4956 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f688b90e-6a1e-4fa8-9e27-74376499eb6b-var-lock\") on node \"crc\" DevicePath \"\"" Dec 11 21:51:52 crc kubenswrapper[4956]: I1211 21:51:52.027177 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 11 21:51:52 crc kubenswrapper[4956]: I1211 21:51:52.106087 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f688b90e-6a1e-4fa8-9e27-74376499eb6b","Type":"ContainerDied","Data":"4edf17a9a14d0af285485447223b90242cd077976f67b609f9ebc0179b927f40"} Dec 11 21:51:52 crc kubenswrapper[4956]: I1211 21:51:52.106127 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4edf17a9a14d0af285485447223b90242cd077976f67b609f9ebc0179b927f40" Dec 11 21:51:52 crc kubenswrapper[4956]: I1211 21:51:52.106171 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 11 21:51:52 crc kubenswrapper[4956]: I1211 21:51:52.109482 4956 status_manager.go:851] "Failed to get status for pod" podUID="f688b90e-6a1e-4fa8-9e27-74376499eb6b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.83:6443: connect: connection refused" Dec 11 21:51:52 crc kubenswrapper[4956]: I1211 21:51:52.109800 4956 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.83:6443: connect: connection refused" Dec 11 21:51:54 crc kubenswrapper[4956]: E1211 21:51:54.773437 4956 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.83:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188047b7d54d48c1 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-11 21:51:48.812433601 +0000 UTC m=+201.256811761,LastTimestamp:2025-12-11 21:51:48.812433601 +0000 UTC m=+201.256811761,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 11 21:51:57 crc kubenswrapper[4956]: E1211 21:51:57.252914 4956 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.83:6443: connect: connection refused" Dec 11 21:51:57 crc kubenswrapper[4956]: E1211 21:51:57.253328 4956 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.83:6443: connect: connection refused" Dec 11 21:51:57 crc kubenswrapper[4956]: E1211 21:51:57.253922 4956 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.83:6443: connect: connection refused" Dec 11 21:51:57 crc kubenswrapper[4956]: E1211 21:51:57.254462 4956 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.83:6443: connect: connection refused" Dec 11 21:51:57 crc kubenswrapper[4956]: E1211 21:51:57.254796 4956 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.83:6443: connect: connection refused" 
Dec 11 21:51:57 crc kubenswrapper[4956]: I1211 21:51:57.254846 4956 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease"
Dec 11 21:51:57 crc kubenswrapper[4956]: E1211 21:51:57.255098 4956 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.83:6443: connect: connection refused" interval="200ms"
Dec 11 21:51:57 crc kubenswrapper[4956]: E1211 21:51:57.455609 4956 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.83:6443: connect: connection refused" interval="400ms"
Dec 11 21:51:57 crc kubenswrapper[4956]: E1211 21:51:57.856648 4956 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.83:6443: connect: connection refused" interval="800ms"
Dec 11 21:51:58 crc kubenswrapper[4956]: I1211 21:51:58.024530 4956 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.83:6443: connect: connection refused"
Dec 11 21:51:58 crc kubenswrapper[4956]: I1211 21:51:58.025083 4956 status_manager.go:851] "Failed to get status for pod" podUID="f688b90e-6a1e-4fa8-9e27-74376499eb6b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.83:6443: connect: connection refused"
Dec 11 21:51:58 crc kubenswrapper[4956]: E1211 21:51:58.657623 4956 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.83:6443: connect: connection refused" interval="1.6s"
Dec 11 21:52:00 crc kubenswrapper[4956]: E1211 21:52:00.259043 4956 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.83:6443: connect: connection refused" interval="3.2s"
Dec 11 21:52:01 crc kubenswrapper[4956]: I1211 21:52:01.020495 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 21:52:01 crc kubenswrapper[4956]: I1211 21:52:01.021210 4956 status_manager.go:851] "Failed to get status for pod" podUID="f688b90e-6a1e-4fa8-9e27-74376499eb6b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.83:6443: connect: connection refused"
Dec 11 21:52:01 crc kubenswrapper[4956]: I1211 21:52:01.022166 4956 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.83:6443: connect: connection refused"
Dec 11 21:52:01 crc kubenswrapper[4956]: I1211 21:52:01.047419 4956 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="98a84156-b48d-4a54-bc7a-21f1f3dbe78f"
Dec 11 21:52:01 crc kubenswrapper[4956]: I1211 21:52:01.047468 4956 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="98a84156-b48d-4a54-bc7a-21f1f3dbe78f"
Dec 11 21:52:01 crc kubenswrapper[4956]: E1211 21:52:01.048238 4956 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.83:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 21:52:01 crc kubenswrapper[4956]: I1211 21:52:01.050304 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 21:52:01 crc kubenswrapper[4956]: I1211 21:52:01.162300 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"5114800c44accd7d870d7b28386aeb433250b01f19af1a67fd17db370e16eb78"}
Dec 11 21:52:02 crc kubenswrapper[4956]: I1211 21:52:02.174352 4956 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="9b4d03a46b74f94b27cb7d29c46e4e2a4dc9d75e8c97f7a2b472dff3df2b9fe9" exitCode=0
Dec 11 21:52:02 crc kubenswrapper[4956]: I1211 21:52:02.174535 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"9b4d03a46b74f94b27cb7d29c46e4e2a4dc9d75e8c97f7a2b472dff3df2b9fe9"}
Dec 11 21:52:02 crc kubenswrapper[4956]: I1211 21:52:02.174954 4956 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="98a84156-b48d-4a54-bc7a-21f1f3dbe78f"
Dec 11 21:52:02 crc kubenswrapper[4956]: I1211 21:52:02.176733 4956 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="98a84156-b48d-4a54-bc7a-21f1f3dbe78f"
Dec 11 21:52:02 crc kubenswrapper[4956]: I1211 21:52:02.175561 4956 status_manager.go:851] "Failed to get status for pod" podUID="f688b90e-6a1e-4fa8-9e27-74376499eb6b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.83:6443: connect: connection refused"
Dec 11 21:52:02 crc kubenswrapper[4956]: I1211 21:52:02.177568 4956 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.83:6443: connect: connection refused"
Dec 11 21:52:02 crc kubenswrapper[4956]: E1211 21:52:02.177705 4956 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.83:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 21:52:03 crc kubenswrapper[4956]: I1211 21:52:03.183069 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"208ec244dd79e20194a6cfc887c8591d583b31f3fc094bd30237ff8a198fed36"}
Dec 11 21:52:03 crc kubenswrapper[4956]: I1211 21:52:03.183447 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"0339b0fddb8daf33a6b8a856b6c3264b8969100e535ad0e329442e6065a19e6c"}
Dec 11 21:52:03 crc kubenswrapper[4956]: I1211 21:52:03.183459 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"5706ba2dd7c8dbbf5ec0e257f8ee0c9cdbca7f3c6c772f113f72aa4657aae2f9"}
Dec 11 21:52:03 crc kubenswrapper[4956]: I1211 21:52:03.183471 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"c827ac7869f5643f6401cb88ce97d05d5d74d12c36d1296f774a2f0a9b5c9760"}
Dec 11 21:52:04 crc kubenswrapper[4956]: I1211 21:52:04.190206 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"a3579743bd905e58946406539059f876179fd489330db00a2907e26a1a3e0c31"}
Dec 11 21:52:04 crc kubenswrapper[4956]: I1211 21:52:04.190523 4956 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="98a84156-b48d-4a54-bc7a-21f1f3dbe78f"
Dec 11 21:52:04 crc kubenswrapper[4956]: I1211 21:52:04.190542 4956 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="98a84156-b48d-4a54-bc7a-21f1f3dbe78f"
Dec 11 21:52:04 crc kubenswrapper[4956]: I1211 21:52:04.190801 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 21:52:04 crc kubenswrapper[4956]: I1211 21:52:04.193667 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Dec 11 21:52:04 crc kubenswrapper[4956]: I1211 21:52:04.193848 4956 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343" exitCode=1
Dec 11 21:52:04 crc kubenswrapper[4956]: I1211 21:52:04.193938 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343"}
Dec 11 21:52:04 crc kubenswrapper[4956]: I1211 21:52:04.194398 4956 scope.go:117] "RemoveContainer" containerID="29d64b342540248d68079183a64a13fcd2a7d2b4ab0ba35f6e67eb8210795343"
Dec 11 21:52:05 crc kubenswrapper[4956]: I1211 21:52:05.205412 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Dec 11 21:52:05 crc kubenswrapper[4956]: I1211 21:52:05.206348 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"09dc4b1d8557894e3528392d69a42dff5b1bb021eda01a9f66faf4670ec76d1d"}
Dec 11 21:52:06 crc kubenswrapper[4956]: I1211 21:52:06.050990 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 21:52:06 crc kubenswrapper[4956]: I1211 21:52:06.051227 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 21:52:06 crc kubenswrapper[4956]: I1211 21:52:06.060685 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 21:52:09 crc kubenswrapper[4956]: I1211 21:52:09.199963 4956 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 21:52:09 crc kubenswrapper[4956]: I1211 21:52:09.230323 4956 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="98a84156-b48d-4a54-bc7a-21f1f3dbe78f"
Dec 11 21:52:09 crc kubenswrapper[4956]: I1211 21:52:09.230367 4956 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="98a84156-b48d-4a54-bc7a-21f1f3dbe78f"
Dec 11 21:52:09 crc kubenswrapper[4956]: I1211 21:52:09.234619 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 21:52:09 crc kubenswrapper[4956]: I1211 21:52:09.293005 4956 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="40e04462-a0f3-4dd7-b56c-645f912d4ec4"
Dec 11 21:52:09 crc kubenswrapper[4956]: I1211 21:52:09.914416 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" podUID="6dae87cb-e091-408e-9b9d-4d45e7797fc5" containerName="oauth-openshift" containerID="cri-o://ffc9556e97d258b01bfb3b9cd94e82f237e03be95ffb33d792ec60994de8aec4" gracePeriod=15
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.237094 4956 generic.go:334] "Generic (PLEG): container finished" podID="6dae87cb-e091-408e-9b9d-4d45e7797fc5" containerID="ffc9556e97d258b01bfb3b9cd94e82f237e03be95ffb33d792ec60994de8aec4" exitCode=0
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.237220 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" event={"ID":"6dae87cb-e091-408e-9b9d-4d45e7797fc5","Type":"ContainerDied","Data":"ffc9556e97d258b01bfb3b9cd94e82f237e03be95ffb33d792ec60994de8aec4"}
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.237474 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" event={"ID":"6dae87cb-e091-408e-9b9d-4d45e7797fc5","Type":"ContainerDied","Data":"11c57e4b7a2ace1261239b7ec8a9c44be21f6f62bd683b7741801610aad96bbe"}
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.237500 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="11c57e4b7a2ace1261239b7ec8a9c44be21f6f62bd683b7741801610aad96bbe"
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.237714 4956 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="98a84156-b48d-4a54-bc7a-21f1f3dbe78f"
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.237732 4956 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="98a84156-b48d-4a54-bc7a-21f1f3dbe78f"
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.240738 4956 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="40e04462-a0f3-4dd7-b56c-645f912d4ec4"
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.261071 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl"
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.315913 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-user-template-error\") pod \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") "
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.315967 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-session\") pod \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") "
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.315994 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-ocp-branding-template\") pod \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") "
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.316030 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-service-ca\") pod \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") "
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.316057 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6dae87cb-e091-408e-9b9d-4d45e7797fc5-audit-policies\") pod \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") "
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.316082 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-trusted-ca-bundle\") pod \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") "
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.316104 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-router-certs\") pod \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") "
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.316126 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-serving-cert\") pod \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") "
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.316148 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-cliconfig\") pod \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") "
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.316171 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-user-template-provider-selection\") pod \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") "
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.316216 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6dae87cb-e091-408e-9b9d-4d45e7797fc5-audit-dir\") pod \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") "
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.316231 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-user-template-login\") pod \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") "
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.316253 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7pw7f\" (UniqueName: \"kubernetes.io/projected/6dae87cb-e091-408e-9b9d-4d45e7797fc5-kube-api-access-7pw7f\") pod \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") "
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.316270 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-user-idp-0-file-data\") pod \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\" (UID: \"6dae87cb-e091-408e-9b9d-4d45e7797fc5\") "
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.318007 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "6dae87cb-e091-408e-9b9d-4d45e7797fc5" (UID: "6dae87cb-e091-408e-9b9d-4d45e7797fc5"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.318559 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6dae87cb-e091-408e-9b9d-4d45e7797fc5-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "6dae87cb-e091-408e-9b9d-4d45e7797fc5" (UID: "6dae87cb-e091-408e-9b9d-4d45e7797fc5"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.319115 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6dae87cb-e091-408e-9b9d-4d45e7797fc5-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "6dae87cb-e091-408e-9b9d-4d45e7797fc5" (UID: "6dae87cb-e091-408e-9b9d-4d45e7797fc5"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.319547 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "6dae87cb-e091-408e-9b9d-4d45e7797fc5" (UID: "6dae87cb-e091-408e-9b9d-4d45e7797fc5"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.320059 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "6dae87cb-e091-408e-9b9d-4d45e7797fc5" (UID: "6dae87cb-e091-408e-9b9d-4d45e7797fc5"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.322680 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "6dae87cb-e091-408e-9b9d-4d45e7797fc5" (UID: "6dae87cb-e091-408e-9b9d-4d45e7797fc5"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.323241 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "6dae87cb-e091-408e-9b9d-4d45e7797fc5" (UID: "6dae87cb-e091-408e-9b9d-4d45e7797fc5"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.323361 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "6dae87cb-e091-408e-9b9d-4d45e7797fc5" (UID: "6dae87cb-e091-408e-9b9d-4d45e7797fc5"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.323814 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "6dae87cb-e091-408e-9b9d-4d45e7797fc5" (UID: "6dae87cb-e091-408e-9b9d-4d45e7797fc5"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.324182 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "6dae87cb-e091-408e-9b9d-4d45e7797fc5" (UID: "6dae87cb-e091-408e-9b9d-4d45e7797fc5"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.324577 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "6dae87cb-e091-408e-9b9d-4d45e7797fc5" (UID: "6dae87cb-e091-408e-9b9d-4d45e7797fc5"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.325046 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "6dae87cb-e091-408e-9b9d-4d45e7797fc5" (UID: "6dae87cb-e091-408e-9b9d-4d45e7797fc5"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.325833 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "6dae87cb-e091-408e-9b9d-4d45e7797fc5" (UID: "6dae87cb-e091-408e-9b9d-4d45e7797fc5"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.326710 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6dae87cb-e091-408e-9b9d-4d45e7797fc5-kube-api-access-7pw7f" (OuterVolumeSpecName: "kube-api-access-7pw7f") pod "6dae87cb-e091-408e-9b9d-4d45e7797fc5" (UID: "6dae87cb-e091-408e-9b9d-4d45e7797fc5"). InnerVolumeSpecName "kube-api-access-7pw7f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.418043 4956 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6dae87cb-e091-408e-9b9d-4d45e7797fc5-audit-dir\") on node \"crc\" DevicePath \"\""
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.418085 4956 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\""
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.418098 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7pw7f\" (UniqueName: \"kubernetes.io/projected/6dae87cb-e091-408e-9b9d-4d45e7797fc5-kube-api-access-7pw7f\") on node \"crc\" DevicePath \"\""
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.418107 4956 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\""
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.418117 4956 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.418126 4956 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-session\") on node \"crc\" DevicePath \"\""
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.418136 4956 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\""
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.418144 4956 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\""
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.418154 4956 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6dae87cb-e091-408e-9b9d-4d45e7797fc5-audit-policies\") on node \"crc\" DevicePath \"\""
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.418165 4956 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.418177 4956 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\""
Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.418185 4956 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-serving-cert\") on node
\"crc\" DevicePath \"\"" Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.418195 4956 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.418206 4956 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6dae87cb-e091-408e-9b9d-4d45e7797fc5-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 11 21:52:10 crc kubenswrapper[4956]: I1211 21:52:10.778884 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 21:52:11 crc kubenswrapper[4956]: I1211 21:52:11.242637 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-hq2jl" Dec 11 21:52:12 crc kubenswrapper[4956]: I1211 21:52:12.466460 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 21:52:12 crc kubenswrapper[4956]: I1211 21:52:12.471546 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 21:52:16 crc kubenswrapper[4956]: I1211 21:52:16.888337 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 21:52:16 crc kubenswrapper[4956]: I1211 21:52:16.888754 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 21:52:16 crc kubenswrapper[4956]: I1211 21:52:16.888834 4956 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" Dec 11 21:52:16 crc kubenswrapper[4956]: I1211 21:52:16.889568 4956 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d"} pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 21:52:16 crc kubenswrapper[4956]: I1211 21:52:16.889636 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" containerID="cri-o://14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d" gracePeriod=600 Dec 11 21:52:17 crc kubenswrapper[4956]: I1211 21:52:17.278829 4956 generic.go:334] "Generic (PLEG): container finished" podID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerID="14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d" exitCode=0 Dec 11 21:52:17 crc kubenswrapper[4956]: I1211 
21:52:17.278926 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" event={"ID":"cf61c63b-b06c-4f51-add2-aefe57de751a","Type":"ContainerDied","Data":"14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d"} Dec 11 21:52:18 crc kubenswrapper[4956]: I1211 21:52:18.289952 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" event={"ID":"cf61c63b-b06c-4f51-add2-aefe57de751a","Type":"ContainerStarted","Data":"b26c68c76a615f74dfaca11d8d29401bc54299569edf6b7ab00d1822c24a1a25"} Dec 11 21:52:18 crc kubenswrapper[4956]: I1211 21:52:18.815971 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 11 21:52:18 crc kubenswrapper[4956]: I1211 21:52:18.934513 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 11 21:52:19 crc kubenswrapper[4956]: I1211 21:52:19.988876 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 11 21:52:20 crc kubenswrapper[4956]: I1211 21:52:20.153592 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 11 21:52:20 crc kubenswrapper[4956]: I1211 21:52:20.305821 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 11 21:52:20 crc kubenswrapper[4956]: I1211 21:52:20.353169 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 11 21:52:20 crc kubenswrapper[4956]: I1211 21:52:20.546119 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 11 21:52:20 crc kubenswrapper[4956]: I1211 21:52:20.782724 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 21:52:20 crc kubenswrapper[4956]: I1211 21:52:20.791200 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 11 21:52:21 crc kubenswrapper[4956]: I1211 21:52:21.003878 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 11 21:52:21 crc kubenswrapper[4956]: I1211 21:52:21.057637 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 11 21:52:21 crc kubenswrapper[4956]: I1211 21:52:21.259840 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 11 21:52:21 crc kubenswrapper[4956]: I1211 21:52:21.307008 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 11 21:52:21 crc kubenswrapper[4956]: I1211 21:52:21.386578 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 11 21:52:21 crc kubenswrapper[4956]: I1211 21:52:21.610716 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 11 21:52:21 crc kubenswrapper[4956]: I1211 21:52:21.919380 4956 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 11 21:52:21 crc kubenswrapper[4956]: I1211 21:52:21.930040 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 11 21:52:21 crc kubenswrapper[4956]: I1211 21:52:21.952760 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 11 21:52:21 crc kubenswrapper[4956]: I1211 21:52:21.993129 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 11 21:52:22 crc kubenswrapper[4956]: I1211 21:52:22.081430 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 11 21:52:22 crc kubenswrapper[4956]: I1211 21:52:22.273555 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 11 21:52:22 crc kubenswrapper[4956]: I1211 21:52:22.278542 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 11 21:52:22 crc kubenswrapper[4956]: I1211 21:52:22.295643 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 11 21:52:22 crc kubenswrapper[4956]: I1211 21:52:22.330921 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 11 21:52:22 crc kubenswrapper[4956]: I1211 21:52:22.346135 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 11 21:52:22 crc kubenswrapper[4956]: I1211 21:52:22.487678 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 11 21:52:22 crc kubenswrapper[4956]: I1211 21:52:22.532448 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 11 21:52:22 crc kubenswrapper[4956]: I1211 21:52:22.686270 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 11 21:52:22 crc kubenswrapper[4956]: I1211 21:52:22.816732 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 11 21:52:22 crc kubenswrapper[4956]: I1211 21:52:22.839635 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 11 21:52:22 crc kubenswrapper[4956]: I1211 21:52:22.898048 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 11 21:52:22 crc kubenswrapper[4956]: I1211 21:52:22.981386 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 11 21:52:23 crc kubenswrapper[4956]: I1211 21:52:23.001156 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 11 21:52:23 crc kubenswrapper[4956]: I1211 21:52:23.049236 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 11 21:52:23 crc kubenswrapper[4956]: I1211 21:52:23.063171 4956 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 11 21:52:23 crc kubenswrapper[4956]: I1211 21:52:23.239855 4956 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 11 21:52:23 crc kubenswrapper[4956]: I1211 21:52:23.274826 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 11 21:52:23 crc kubenswrapper[4956]: I1211 21:52:23.313689 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 11 21:52:23 crc kubenswrapper[4956]: I1211 21:52:23.430190 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 11 21:52:23 crc kubenswrapper[4956]: I1211 21:52:23.521503 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 11 21:52:23 crc kubenswrapper[4956]: I1211 21:52:23.556077 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 11 21:52:23 crc kubenswrapper[4956]: I1211 21:52:23.642836 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 11 21:52:23 crc kubenswrapper[4956]: I1211 21:52:23.644939 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 11 21:52:23 crc kubenswrapper[4956]: I1211 21:52:23.657905 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 11 21:52:23 crc kubenswrapper[4956]: I1211 21:52:23.692704 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 11 21:52:23 crc kubenswrapper[4956]: I1211 21:52:23.749344 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 11 21:52:23 crc kubenswrapper[4956]: I1211 21:52:23.805402 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 11 21:52:23 crc kubenswrapper[4956]: I1211 21:52:23.849940 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 11 21:52:24 crc kubenswrapper[4956]: I1211 21:52:24.036417 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 11 21:52:24 crc kubenswrapper[4956]: I1211 21:52:24.046241 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 11 21:52:24 crc kubenswrapper[4956]: I1211 21:52:24.091348 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 11 21:52:24 crc kubenswrapper[4956]: I1211 21:52:24.093592 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 11 21:52:24 crc kubenswrapper[4956]: I1211 21:52:24.098878 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 11 21:52:24 crc kubenswrapper[4956]: I1211 
21:52:24.243213 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 11 21:52:24 crc kubenswrapper[4956]: I1211 21:52:24.369806 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 11 21:52:24 crc kubenswrapper[4956]: I1211 21:52:24.436876 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 11 21:52:24 crc kubenswrapper[4956]: I1211 21:52:24.526515 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 11 21:52:24 crc kubenswrapper[4956]: I1211 21:52:24.606014 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 11 21:52:24 crc kubenswrapper[4956]: I1211 21:52:24.670518 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 11 21:52:24 crc kubenswrapper[4956]: I1211 21:52:24.753240 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 11 21:52:24 crc kubenswrapper[4956]: I1211 21:52:24.806580 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 11 21:52:24 crc kubenswrapper[4956]: I1211 21:52:24.937875 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 11 21:52:24 crc kubenswrapper[4956]: I1211 21:52:24.946185 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 11 21:52:25 crc kubenswrapper[4956]: I1211 21:52:25.007174 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 11 21:52:25 crc kubenswrapper[4956]: I1211 21:52:25.060262 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 11 21:52:25 crc kubenswrapper[4956]: I1211 21:52:25.186502 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 11 21:52:25 crc kubenswrapper[4956]: I1211 21:52:25.222295 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 11 21:52:25 crc kubenswrapper[4956]: I1211 21:52:25.383433 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 11 21:52:25 crc kubenswrapper[4956]: I1211 21:52:25.462775 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 11 21:52:25 crc kubenswrapper[4956]: I1211 21:52:25.475962 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 11 21:52:25 crc kubenswrapper[4956]: I1211 21:52:25.484096 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 11 21:52:25 crc kubenswrapper[4956]: I1211 21:52:25.498449 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 11 21:52:25 crc kubenswrapper[4956]: I1211 21:52:25.525951 4956 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 11 
21:52:25 crc kubenswrapper[4956]: I1211 21:52:25.582739 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 11 21:52:25 crc kubenswrapper[4956]: I1211 21:52:25.625495 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 11 21:52:25 crc kubenswrapper[4956]: I1211 21:52:25.891378 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 11 21:52:26 crc kubenswrapper[4956]: I1211 21:52:26.100106 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 11 21:52:26 crc kubenswrapper[4956]: I1211 21:52:26.125712 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 11 21:52:26 crc kubenswrapper[4956]: I1211 21:52:26.246257 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 11 21:52:26 crc kubenswrapper[4956]: I1211 21:52:26.269307 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 11 21:52:26 crc kubenswrapper[4956]: I1211 21:52:26.274329 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 11 21:52:26 crc kubenswrapper[4956]: I1211 21:52:26.309002 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 11 21:52:26 crc kubenswrapper[4956]: I1211 21:52:26.332360 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 11 21:52:26 crc kubenswrapper[4956]: I1211 21:52:26.484053 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 11 21:52:26 crc kubenswrapper[4956]: I1211 21:52:26.558650 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 11 21:52:26 crc kubenswrapper[4956]: I1211 21:52:26.663483 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 11 21:52:26 crc kubenswrapper[4956]: I1211 21:52:26.679104 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 11 21:52:26 crc kubenswrapper[4956]: I1211 21:52:26.690420 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 11 21:52:26 crc kubenswrapper[4956]: I1211 21:52:26.862825 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 11 21:52:26 crc kubenswrapper[4956]: I1211 21:52:26.870488 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 11 21:52:26 crc kubenswrapper[4956]: I1211 21:52:26.932830 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 11 21:52:27 crc kubenswrapper[4956]: I1211 21:52:27.132586 4956 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 11 21:52:27 crc kubenswrapper[4956]: I1211 21:52:27.140714 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 11 21:52:27 crc kubenswrapper[4956]: I1211 21:52:27.182238 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 11 21:52:27 crc kubenswrapper[4956]: I1211 21:52:27.293466 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 11 21:52:27 crc kubenswrapper[4956]: I1211 21:52:27.304075 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 11 21:52:27 crc kubenswrapper[4956]: I1211 21:52:27.487977 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 11 21:52:27 crc kubenswrapper[4956]: I1211 21:52:27.491589 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 11 21:52:27 crc kubenswrapper[4956]: I1211 21:52:27.505807 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 11 21:52:27 crc kubenswrapper[4956]: I1211 21:52:27.562301 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 11 21:52:27 crc kubenswrapper[4956]: I1211 21:52:27.568839 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 11 21:52:27 crc kubenswrapper[4956]: I1211 21:52:27.598394 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 11 21:52:27 crc kubenswrapper[4956]: I1211 21:52:27.603712 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 11 21:52:27 crc kubenswrapper[4956]: I1211 21:52:27.664743 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 11 21:52:27 crc kubenswrapper[4956]: I1211 21:52:27.740599 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 11 21:52:27 crc kubenswrapper[4956]: I1211 21:52:27.800025 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 11 21:52:27 crc kubenswrapper[4956]: I1211 21:52:27.801159 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 11 21:52:27 crc kubenswrapper[4956]: I1211 21:52:27.886087 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 11 21:52:27 crc kubenswrapper[4956]: I1211 21:52:27.886245 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 11 21:52:27 crc kubenswrapper[4956]: I1211 21:52:27.901114 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 11 21:52:27 crc kubenswrapper[4956]: I1211 21:52:27.912791 4956 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 11 21:52:27 crc kubenswrapper[4956]: I1211 21:52:27.926828 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 11 21:52:28 crc kubenswrapper[4956]: I1211 21:52:28.058044 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 11 21:52:28 crc kubenswrapper[4956]: I1211 21:52:28.098167 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 11 21:52:28 crc kubenswrapper[4956]: I1211 21:52:28.199329 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 11 21:52:28 crc kubenswrapper[4956]: I1211 21:52:28.295107 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 11 21:52:28 crc kubenswrapper[4956]: I1211 21:52:28.317478 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 11 21:52:28 crc kubenswrapper[4956]: I1211 21:52:28.335378 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 11 21:52:28 crc kubenswrapper[4956]: I1211 21:52:28.365867 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 11 21:52:28 crc kubenswrapper[4956]: I1211 21:52:28.370354 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 11 21:52:28 crc kubenswrapper[4956]: I1211 21:52:28.474909 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 11 21:52:28 crc kubenswrapper[4956]: I1211 21:52:28.494195 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 11 21:52:28 crc kubenswrapper[4956]: I1211 21:52:28.660069 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 11 21:52:28 crc kubenswrapper[4956]: I1211 21:52:28.696513 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 11 21:52:28 crc kubenswrapper[4956]: I1211 21:52:28.723628 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 11 21:52:28 crc kubenswrapper[4956]: I1211 21:52:28.774417 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 11 21:52:28 crc kubenswrapper[4956]: I1211 21:52:28.798428 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 11 21:52:28 crc kubenswrapper[4956]: I1211 21:52:28.875196 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 11 21:52:28 crc kubenswrapper[4956]: I1211 21:52:28.963859 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 11 21:52:28 crc kubenswrapper[4956]: I1211 21:52:28.993934 4956 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 11 21:52:29 crc kubenswrapper[4956]: I1211 21:52:29.013538 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 11 21:52:29 crc kubenswrapper[4956]: I1211 21:52:29.190858 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 11 21:52:29 crc kubenswrapper[4956]: I1211 21:52:29.228321 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 11 21:52:29 crc kubenswrapper[4956]: I1211 21:52:29.319392 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 11 21:52:29 crc kubenswrapper[4956]: I1211 21:52:29.358814 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 11 21:52:29 crc kubenswrapper[4956]: I1211 21:52:29.382369 4956 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 11 21:52:29 crc kubenswrapper[4956]: I1211 21:52:29.463060 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 11 21:52:29 crc kubenswrapper[4956]: I1211 21:52:29.464284 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 11 21:52:29 crc kubenswrapper[4956]: I1211 21:52:29.464426 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 11 21:52:29 crc kubenswrapper[4956]: I1211 21:52:29.472973 4956 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 11 21:52:29 crc kubenswrapper[4956]: I1211 21:52:29.488399 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 11 21:52:29 crc kubenswrapper[4956]: I1211 21:52:29.522405 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 11 21:52:29 crc kubenswrapper[4956]: I1211 21:52:29.560394 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 11 21:52:29 crc kubenswrapper[4956]: I1211 21:52:29.572522 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 11 21:52:29 crc kubenswrapper[4956]: I1211 21:52:29.587211 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 11 21:52:29 crc kubenswrapper[4956]: I1211 21:52:29.603902 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 11 21:52:29 crc kubenswrapper[4956]: I1211 21:52:29.754413 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 11 21:52:29 crc kubenswrapper[4956]: I1211 21:52:29.875155 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 11 21:52:29 crc kubenswrapper[4956]: I1211 21:52:29.903440 4956 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 11 21:52:29 crc kubenswrapper[4956]: I1211 21:52:29.908641 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 11 21:52:29 crc kubenswrapper[4956]: I1211 21:52:29.982871 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.038566 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.109001 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.239597 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.240850 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.270933 4956 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.272063 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=42.272048308 podStartE2EDuration="42.272048308s" podCreationTimestamp="2025-12-11 21:51:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:52:09.242090275 +0000 UTC m=+221.686468435" watchObservedRunningTime="2025-12-11 21:52:30.272048308 +0000 UTC m=+242.716426468" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.276386 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hq2jl","openshift-kube-apiserver/kube-apiserver-crc"] Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.276466 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-authentication/oauth-openshift-7dc5844c99-8trq2"] Dec 11 21:52:30 crc kubenswrapper[4956]: E1211 21:52:30.276656 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f688b90e-6a1e-4fa8-9e27-74376499eb6b" containerName="installer" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.276670 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="f688b90e-6a1e-4fa8-9e27-74376499eb6b" containerName="installer" Dec 11 21:52:30 crc kubenswrapper[4956]: E1211 21:52:30.276685 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dae87cb-e091-408e-9b9d-4d45e7797fc5" containerName="oauth-openshift" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.276693 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dae87cb-e091-408e-9b9d-4d45e7797fc5" containerName="oauth-openshift" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.276842 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="f688b90e-6a1e-4fa8-9e27-74376499eb6b" containerName="installer" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.276853 4956 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="6dae87cb-e091-408e-9b9d-4d45e7797fc5" containerName="oauth-openshift" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.277224 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.277372 4956 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="98a84156-b48d-4a54-bc7a-21f1f3dbe78f" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.277393 4956 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="98a84156-b48d-4a54-bc7a-21f1f3dbe78f" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.279272 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.279272 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.279761 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.279956 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.280167 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.280314 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.280343 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.280237 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.280949 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.282152 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.282289 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.282595 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.290215 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.294085 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.301096 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 11 21:52:30 crc 
kubenswrapper[4956]: I1211 21:52:30.301225 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.307980 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=21.307961139 podStartE2EDuration="21.307961139s" podCreationTimestamp="2025-12-11 21:52:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:52:30.304656851 +0000 UTC m=+242.749035031" watchObservedRunningTime="2025-12-11 21:52:30.307961139 +0000 UTC m=+242.752339299" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.308834 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.320492 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.360520 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.363558 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.384867 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/351c5163-492c-461a-acfd-85991084901c-v4-0-config-system-service-ca\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.384914 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/351c5163-492c-461a-acfd-85991084901c-audit-dir\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.385111 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/351c5163-492c-461a-acfd-85991084901c-v4-0-config-user-template-login\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.385240 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/351c5163-492c-461a-acfd-85991084901c-audit-policies\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.385271 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/351c5163-492c-461a-acfd-85991084901c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.385302 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/351c5163-492c-461a-acfd-85991084901c-v4-0-config-system-router-certs\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.385352 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/351c5163-492c-461a-acfd-85991084901c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.385392 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pf5m6\" (UniqueName: \"kubernetes.io/projected/351c5163-492c-461a-acfd-85991084901c-kube-api-access-pf5m6\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.385431 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/351c5163-492c-461a-acfd-85991084901c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.385503 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/351c5163-492c-461a-acfd-85991084901c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.385579 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/351c5163-492c-461a-acfd-85991084901c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2" Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.385619 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/351c5163-492c-461a-acfd-85991084901c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2" Dec 11 21:52:30 crc 
kubenswrapper[4956]: I1211 21:52:30.385644 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/351c5163-492c-461a-acfd-85991084901c-v4-0-config-user-template-error\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.385672 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/351c5163-492c-461a-acfd-85991084901c-v4-0-config-system-session\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.389940 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.430995 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.453468 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.466812 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.486074 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/351c5163-492c-461a-acfd-85991084901c-v4-0-config-system-session\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.486124 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/351c5163-492c-461a-acfd-85991084901c-v4-0-config-system-service-ca\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.486145 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/351c5163-492c-461a-acfd-85991084901c-audit-dir\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.486168 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/351c5163-492c-461a-acfd-85991084901c-v4-0-config-user-template-login\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.486190 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/351c5163-492c-461a-acfd-85991084901c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.486206 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/351c5163-492c-461a-acfd-85991084901c-audit-policies\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.486228 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/351c5163-492c-461a-acfd-85991084901c-v4-0-config-system-router-certs\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.486257 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/351c5163-492c-461a-acfd-85991084901c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.486279 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pf5m6\" (UniqueName: \"kubernetes.io/projected/351c5163-492c-461a-acfd-85991084901c-kube-api-access-pf5m6\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.486300 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/351c5163-492c-461a-acfd-85991084901c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.486328 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/351c5163-492c-461a-acfd-85991084901c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.486356 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/351c5163-492c-461a-acfd-85991084901c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.486380 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/351c5163-492c-461a-acfd-85991084901c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.486395 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/351c5163-492c-461a-acfd-85991084901c-v4-0-config-user-template-error\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.486423 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/351c5163-492c-461a-acfd-85991084901c-audit-dir\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.487361 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/351c5163-492c-461a-acfd-85991084901c-v4-0-config-system-service-ca\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.488206 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/351c5163-492c-461a-acfd-85991084901c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.488962 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/351c5163-492c-461a-acfd-85991084901c-audit-policies\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.488989 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/351c5163-492c-461a-acfd-85991084901c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.492152 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/351c5163-492c-461a-acfd-85991084901c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.492446 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/351c5163-492c-461a-acfd-85991084901c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.492446 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/351c5163-492c-461a-acfd-85991084901c-v4-0-config-user-template-login\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.492492 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/351c5163-492c-461a-acfd-85991084901c-v4-0-config-system-session\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.492721 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/351c5163-492c-461a-acfd-85991084901c-v4-0-config-user-template-error\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.493018 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/351c5163-492c-461a-acfd-85991084901c-v4-0-config-system-router-certs\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.493335 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/351c5163-492c-461a-acfd-85991084901c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.508524 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pf5m6\" (UniqueName: \"kubernetes.io/projected/351c5163-492c-461a-acfd-85991084901c-kube-api-access-pf5m6\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.509331 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.510464 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/351c5163-492c-461a-acfd-85991084901c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7dc5844c99-8trq2\" (UID: \"351c5163-492c-461a-acfd-85991084901c\") " pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.528703 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.595436 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.620410 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.635923 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.647659 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.702682 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.706247 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.730731 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.760644 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Dec 11 21:52:30 crc kubenswrapper[4956]: I1211 21:52:30.817919 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Dec 11 21:52:31 crc kubenswrapper[4956]: I1211 21:52:31.039845 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Dec 11 21:52:31 crc kubenswrapper[4956]: I1211 21:52:31.130744 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Dec 11 21:52:31 crc kubenswrapper[4956]: I1211 21:52:31.239172 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Dec 11 21:52:31 crc kubenswrapper[4956]: I1211 21:52:31.311383 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Dec 11 21:52:31 crc kubenswrapper[4956]: I1211 21:52:31.357736 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt"
Dec 11 21:52:31 crc kubenswrapper[4956]: I1211 21:52:31.442157 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Dec 11 21:52:31 crc kubenswrapper[4956]: I1211 21:52:31.480462 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Dec 11 21:52:31 crc kubenswrapper[4956]: I1211 21:52:31.521208 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Dec 11 21:52:31 crc kubenswrapper[4956]: I1211 21:52:31.552716 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Dec 11 21:52:31 crc kubenswrapper[4956]: I1211 21:52:31.576902 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt"
Dec 11 21:52:31 crc kubenswrapper[4956]: I1211 21:52:31.599594 4956 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 11 21:52:31 crc kubenswrapper[4956]: I1211 21:52:31.600383 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://c91b45bf0a17c940cdfea92ffad7a0cc5b89cae07f75a244567ffdb2e9c10ffc" gracePeriod=5
Dec 11 21:52:31 crc kubenswrapper[4956]: I1211 21:52:31.645609 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Dec 11 21:52:31 crc kubenswrapper[4956]: I1211 21:52:31.748246 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Dec 11 21:52:31 crc kubenswrapper[4956]: I1211 21:52:31.781110 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Dec 11 21:52:31 crc kubenswrapper[4956]: I1211 21:52:31.811513 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Dec 11 21:52:31 crc kubenswrapper[4956]: I1211 21:52:31.966884 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Dec 11 21:52:32 crc kubenswrapper[4956]: I1211 21:52:32.027216 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6dae87cb-e091-408e-9b9d-4d45e7797fc5" path="/var/lib/kubelet/pods/6dae87cb-e091-408e-9b9d-4d45e7797fc5/volumes"
Dec 11 21:52:32 crc kubenswrapper[4956]: I1211 21:52:32.120133 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls"
Dec 11 21:52:32 crc kubenswrapper[4956]: I1211 21:52:32.169903 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Dec 11 21:52:32 crc kubenswrapper[4956]: I1211 21:52:32.175342 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 11 21:52:32 crc kubenswrapper[4956]: I1211 21:52:32.199885 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Dec 11 21:52:32 crc kubenswrapper[4956]: I1211 21:52:32.240493 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Dec 11 21:52:32 crc kubenswrapper[4956]: I1211 21:52:32.298752 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Dec 11 21:52:32 crc kubenswrapper[4956]: I1211 21:52:32.399733 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Dec 11 21:52:32 crc kubenswrapper[4956]: I1211 21:52:32.402397 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Dec 11 21:52:32 crc kubenswrapper[4956]: I1211 21:52:32.459948 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Dec 11 21:52:32 crc kubenswrapper[4956]: I1211 21:52:32.599712 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 11 21:52:32 crc kubenswrapper[4956]: I1211 21:52:32.602419 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Dec 11 21:52:32 crc kubenswrapper[4956]: I1211 21:52:32.655022 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Dec 11 21:52:32 crc kubenswrapper[4956]: I1211 21:52:32.665264 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Dec 11 21:52:32 crc kubenswrapper[4956]: I1211 21:52:32.769928 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6"
Dec 11 21:52:32 crc kubenswrapper[4956]: I1211 21:52:32.781281 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Dec 11 21:52:32 crc kubenswrapper[4956]: I1211 21:52:32.784462 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Dec 11 21:52:32 crc kubenswrapper[4956]: I1211 21:52:32.860515 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Dec 11 21:52:32 crc kubenswrapper[4956]: I1211 21:52:32.902326 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Dec 11 21:52:32 crc kubenswrapper[4956]: I1211 21:52:32.979893 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Dec 11 21:52:33 crc kubenswrapper[4956]: I1211 21:52:33.013341 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 11 21:52:33 crc kubenswrapper[4956]: I1211 21:52:33.072891 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Dec 11 21:52:33 crc kubenswrapper[4956]: I1211 21:52:33.116680 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Dec 11 21:52:33 crc kubenswrapper[4956]: I1211 21:52:33.189040 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Dec 11 21:52:33 crc kubenswrapper[4956]: I1211 21:52:33.285332 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Dec 11 21:52:33 crc kubenswrapper[4956]: I1211 21:52:33.387986 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Dec 11 21:52:33 crc kubenswrapper[4956]: I1211 21:52:33.408218 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Dec 11 21:52:33 crc kubenswrapper[4956]: I1211 21:52:33.478955 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Dec 11 21:52:33 crc kubenswrapper[4956]: I1211 21:52:33.526846 4956 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Dec 11 21:52:33 crc kubenswrapper[4956]: I1211 21:52:33.578263 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Dec 11 21:52:33 crc kubenswrapper[4956]: I1211 21:52:33.593555 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Dec 11 21:52:33 crc kubenswrapper[4956]: I1211 21:52:33.688466 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Dec 11 21:52:33 crc kubenswrapper[4956]: I1211 21:52:33.936526 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Dec 11 21:52:33 crc kubenswrapper[4956]: I1211 21:52:33.950629 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Dec 11 21:52:34 crc kubenswrapper[4956]: I1211 21:52:34.009036 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Dec 11 21:52:34 crc kubenswrapper[4956]: I1211 21:52:34.023570 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Dec 11 21:52:34 crc kubenswrapper[4956]: I1211 21:52:34.039162 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Dec 11 21:52:34 crc kubenswrapper[4956]: I1211 21:52:34.159665 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Dec 11 21:52:34 crc kubenswrapper[4956]: I1211 21:52:34.236317 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 11 21:52:34 crc kubenswrapper[4956]: I1211 21:52:34.257176 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Dec 11 21:52:34 crc kubenswrapper[4956]: I1211 21:52:34.409699 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl"
Dec 11 21:52:34 crc kubenswrapper[4956]: I1211 21:52:34.711996 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7dc5844c99-8trq2"]
Dec 11 21:52:34 crc kubenswrapper[4956]: I1211 21:52:34.906503 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Dec 11 21:52:34 crc kubenswrapper[4956]: I1211 21:52:34.943968 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 11 21:52:35 crc kubenswrapper[4956]: I1211 21:52:35.164901 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides"
Dec 11 21:52:35 crc kubenswrapper[4956]: I1211 21:52:35.219351 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7dc5844c99-8trq2"]
Dec 11 21:52:35 crc kubenswrapper[4956]: I1211 21:52:35.380506 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2" event={"ID":"351c5163-492c-461a-acfd-85991084901c","Type":"ContainerStarted","Data":"dea64964c5a2a978a9a9ca15da0812f8d76cb1099088c57b62fd1bd757aed7e9"}
event={"ID":"351c5163-492c-461a-acfd-85991084901c","Type":"ContainerStarted","Data":"dea64964c5a2a978a9a9ca15da0812f8d76cb1099088c57b62fd1bd757aed7e9"} Dec 11 21:52:35 crc kubenswrapper[4956]: I1211 21:52:35.586793 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 11 21:52:35 crc kubenswrapper[4956]: I1211 21:52:35.917683 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 11 21:52:36 crc kubenswrapper[4956]: I1211 21:52:36.388661 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2" event={"ID":"351c5163-492c-461a-acfd-85991084901c","Type":"ContainerStarted","Data":"bade311a86cb7b25e3abf56eb9807624bf9db0fe8d7f1b83ecf6d02b15a662b8"} Dec 11 21:52:36 crc kubenswrapper[4956]: I1211 21:52:36.389209 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2" Dec 11 21:52:36 crc kubenswrapper[4956]: I1211 21:52:36.395826 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2" Dec 11 21:52:36 crc kubenswrapper[4956]: I1211 21:52:36.429677 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 11 21:52:36 crc kubenswrapper[4956]: I1211 21:52:36.445513 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-7dc5844c99-8trq2" podStartSLOduration=52.445486097 podStartE2EDuration="52.445486097s" podCreationTimestamp="2025-12-11 21:51:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:52:36.41758599 +0000 UTC m=+248.861964160" watchObservedRunningTime="2025-12-11 21:52:36.445486097 +0000 UTC m=+248.889864287" Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.170118 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.170198 4956 util.go:48] "No ready sandbox for pod can be found. 
Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.275346 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.275494 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.275540 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.275625 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.275651 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.275690 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.275737 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.275762 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.275864 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.277085 4956 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\""
Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.277107 4956 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.277120 4956 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\""
Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.277140 4956 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\""
Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.287669 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.378208 4956 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.399617 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.399702 4956 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="c91b45bf0a17c940cdfea92ffad7a0cc5b89cae07f75a244567ffdb2e9c10ffc" exitCode=137
Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.400793 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.401077 4956 scope.go:117] "RemoveContainer" containerID="c91b45bf0a17c940cdfea92ffad7a0cc5b89cae07f75a244567ffdb2e9c10ffc"
Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.411084 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.430790 4956 scope.go:117] "RemoveContainer" containerID="c91b45bf0a17c940cdfea92ffad7a0cc5b89cae07f75a244567ffdb2e9c10ffc"
Dec 11 21:52:37 crc kubenswrapper[4956]: E1211 21:52:37.431283 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c91b45bf0a17c940cdfea92ffad7a0cc5b89cae07f75a244567ffdb2e9c10ffc\": container with ID starting with c91b45bf0a17c940cdfea92ffad7a0cc5b89cae07f75a244567ffdb2e9c10ffc not found: ID does not exist" containerID="c91b45bf0a17c940cdfea92ffad7a0cc5b89cae07f75a244567ffdb2e9c10ffc"
Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.431410 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c91b45bf0a17c940cdfea92ffad7a0cc5b89cae07f75a244567ffdb2e9c10ffc"} err="failed to get container status \"c91b45bf0a17c940cdfea92ffad7a0cc5b89cae07f75a244567ffdb2e9c10ffc\": rpc error: code = NotFound desc = could not find container \"c91b45bf0a17c940cdfea92ffad7a0cc5b89cae07f75a244567ffdb2e9c10ffc\": container with ID starting with c91b45bf0a17c940cdfea92ffad7a0cc5b89cae07f75a244567ffdb2e9c10ffc not found: ID does not exist"
Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.855640 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Dec 11 21:52:37 crc kubenswrapper[4956]: I1211 21:52:37.908946 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Dec 11 21:52:38 crc kubenswrapper[4956]: I1211 21:52:38.035962 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes"
Dec 11 21:52:38 crc kubenswrapper[4956]: I1211 21:52:38.036280 4956 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID=""
Dec 11 21:52:38 crc kubenswrapper[4956]: I1211 21:52:38.050921 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 11 21:52:38 crc kubenswrapper[4956]: I1211 21:52:38.050975 4956 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="b59af79a-cf32-488d-8b24-f8107d6acb2d"
Dec 11 21:52:38 crc kubenswrapper[4956]: I1211 21:52:38.057633 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 11 21:52:38 crc kubenswrapper[4956]: I1211 21:52:38.057661 4956 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="b59af79a-cf32-488d-8b24-f8107d6acb2d"
Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.007397 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-zvhmh"]
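The E-line at 21:52:37.431283 above is a benign race rather than a real failure: the startup-monitor container had already been removed, so the follow-up ContainerStatus CRI call came back as gRPC NotFound and kubelet logged it and moved on with cleanup. A generic sketch of how such an error is recognized with the grpc status package (illustrative only, not kubelet's actual code):

```go
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// isNotFound reports whether err carries gRPC code NotFound, the code seen in
// "rpc error: code = NotFound desc = could not find container ..." above.
func isNotFound(err error) bool {
	return status.Code(err) == codes.NotFound
}

func main() {
	// Stand-in for a CRI ContainerStatus response after the container is gone.
	err := status.Error(codes.NotFound, "could not find container")
	if isNotFound(err) {
		fmt.Println("container already gone; safe to ignore during cleanup")
	}
}
```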
pods=["openshift-controller-manager/controller-manager-879f6c89f-zvhmh"] Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.008214 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh" podUID="0190a0a5-2358-4044-b766-f164e0124dab" containerName="controller-manager" containerID="cri-o://e95fa539a47118b42a190f4d90e6a216803e87b04ad671984e0ae91d92a9b842" gracePeriod=30 Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.140549 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d"] Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.140743 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d" podUID="fab9c443-9e80-4943-bfa0-0902f4377230" containerName="route-controller-manager" containerID="cri-o://f7e4ab5e07465cabeac5e8a677a0da486086d8bf4942ad724ecdc45194c7fdbe" gracePeriod=30 Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.407681 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh" Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.430078 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0190a0a5-2358-4044-b766-f164e0124dab-client-ca\") pod \"0190a0a5-2358-4044-b766-f164e0124dab\" (UID: \"0190a0a5-2358-4044-b766-f164e0124dab\") " Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.430157 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0190a0a5-2358-4044-b766-f164e0124dab-serving-cert\") pod \"0190a0a5-2358-4044-b766-f164e0124dab\" (UID: \"0190a0a5-2358-4044-b766-f164e0124dab\") " Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.430188 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0190a0a5-2358-4044-b766-f164e0124dab-config\") pod \"0190a0a5-2358-4044-b766-f164e0124dab\" (UID: \"0190a0a5-2358-4044-b766-f164e0124dab\") " Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.430225 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vcns7\" (UniqueName: \"kubernetes.io/projected/0190a0a5-2358-4044-b766-f164e0124dab-kube-api-access-vcns7\") pod \"0190a0a5-2358-4044-b766-f164e0124dab\" (UID: \"0190a0a5-2358-4044-b766-f164e0124dab\") " Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.430272 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0190a0a5-2358-4044-b766-f164e0124dab-proxy-ca-bundles\") pod \"0190a0a5-2358-4044-b766-f164e0124dab\" (UID: \"0190a0a5-2358-4044-b766-f164e0124dab\") " Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.431400 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0190a0a5-2358-4044-b766-f164e0124dab-client-ca" (OuterVolumeSpecName: "client-ca") pod "0190a0a5-2358-4044-b766-f164e0124dab" (UID: "0190a0a5-2358-4044-b766-f164e0124dab"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.431411 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0190a0a5-2358-4044-b766-f164e0124dab-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "0190a0a5-2358-4044-b766-f164e0124dab" (UID: "0190a0a5-2358-4044-b766-f164e0124dab"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.431946 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0190a0a5-2358-4044-b766-f164e0124dab-config" (OuterVolumeSpecName: "config") pod "0190a0a5-2358-4044-b766-f164e0124dab" (UID: "0190a0a5-2358-4044-b766-f164e0124dab"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.448435 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0190a0a5-2358-4044-b766-f164e0124dab-kube-api-access-vcns7" (OuterVolumeSpecName: "kube-api-access-vcns7") pod "0190a0a5-2358-4044-b766-f164e0124dab" (UID: "0190a0a5-2358-4044-b766-f164e0124dab"). InnerVolumeSpecName "kube-api-access-vcns7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.449025 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0190a0a5-2358-4044-b766-f164e0124dab-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0190a0a5-2358-4044-b766-f164e0124dab" (UID: "0190a0a5-2358-4044-b766-f164e0124dab"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.462463 4956 generic.go:334] "Generic (PLEG): container finished" podID="fab9c443-9e80-4943-bfa0-0902f4377230" containerID="f7e4ab5e07465cabeac5e8a677a0da486086d8bf4942ad724ecdc45194c7fdbe" exitCode=0 Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.462546 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d" event={"ID":"fab9c443-9e80-4943-bfa0-0902f4377230","Type":"ContainerDied","Data":"f7e4ab5e07465cabeac5e8a677a0da486086d8bf4942ad724ecdc45194c7fdbe"} Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.464688 4956 generic.go:334] "Generic (PLEG): container finished" podID="0190a0a5-2358-4044-b766-f164e0124dab" containerID="e95fa539a47118b42a190f4d90e6a216803e87b04ad671984e0ae91d92a9b842" exitCode=0 Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.464739 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh" Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.464735 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh" event={"ID":"0190a0a5-2358-4044-b766-f164e0124dab","Type":"ContainerDied","Data":"e95fa539a47118b42a190f4d90e6a216803e87b04ad671984e0ae91d92a9b842"} Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.464935 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-zvhmh" event={"ID":"0190a0a5-2358-4044-b766-f164e0124dab","Type":"ContainerDied","Data":"adc3e2446fec9da62bec98bcebcb05036b67a7fc349b26e69200d60ba84aa57d"} Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.464965 4956 scope.go:117] "RemoveContainer" containerID="e95fa539a47118b42a190f4d90e6a216803e87b04ad671984e0ae91d92a9b842" Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.487714 4956 scope.go:117] "RemoveContainer" containerID="e95fa539a47118b42a190f4d90e6a216803e87b04ad671984e0ae91d92a9b842" Dec 11 21:52:49 crc kubenswrapper[4956]: E1211 21:52:49.488376 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e95fa539a47118b42a190f4d90e6a216803e87b04ad671984e0ae91d92a9b842\": container with ID starting with e95fa539a47118b42a190f4d90e6a216803e87b04ad671984e0ae91d92a9b842 not found: ID does not exist" containerID="e95fa539a47118b42a190f4d90e6a216803e87b04ad671984e0ae91d92a9b842" Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.488413 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e95fa539a47118b42a190f4d90e6a216803e87b04ad671984e0ae91d92a9b842"} err="failed to get container status \"e95fa539a47118b42a190f4d90e6a216803e87b04ad671984e0ae91d92a9b842\": rpc error: code = NotFound desc = could not find container \"e95fa539a47118b42a190f4d90e6a216803e87b04ad671984e0ae91d92a9b842\": container with ID starting with e95fa539a47118b42a190f4d90e6a216803e87b04ad671984e0ae91d92a9b842 not found: ID does not exist" Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.498413 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-zvhmh"] Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.501659 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-zvhmh"] Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.504653 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d" Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.531641 4956 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0190a0a5-2358-4044-b766-f164e0124dab-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.531684 4956 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0190a0a5-2358-4044-b766-f164e0124dab-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.531696 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vcns7\" (UniqueName: \"kubernetes.io/projected/0190a0a5-2358-4044-b766-f164e0124dab-kube-api-access-vcns7\") on node \"crc\" DevicePath \"\"" Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.531707 4956 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0190a0a5-2358-4044-b766-f164e0124dab-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.531717 4956 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0190a0a5-2358-4044-b766-f164e0124dab-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.633314 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5585p\" (UniqueName: \"kubernetes.io/projected/fab9c443-9e80-4943-bfa0-0902f4377230-kube-api-access-5585p\") pod \"fab9c443-9e80-4943-bfa0-0902f4377230\" (UID: \"fab9c443-9e80-4943-bfa0-0902f4377230\") " Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.633366 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fab9c443-9e80-4943-bfa0-0902f4377230-client-ca\") pod \"fab9c443-9e80-4943-bfa0-0902f4377230\" (UID: \"fab9c443-9e80-4943-bfa0-0902f4377230\") " Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.633439 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fab9c443-9e80-4943-bfa0-0902f4377230-serving-cert\") pod \"fab9c443-9e80-4943-bfa0-0902f4377230\" (UID: \"fab9c443-9e80-4943-bfa0-0902f4377230\") " Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.633464 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fab9c443-9e80-4943-bfa0-0902f4377230-config\") pod \"fab9c443-9e80-4943-bfa0-0902f4377230\" (UID: \"fab9c443-9e80-4943-bfa0-0902f4377230\") " Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.634170 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fab9c443-9e80-4943-bfa0-0902f4377230-client-ca" (OuterVolumeSpecName: "client-ca") pod "fab9c443-9e80-4943-bfa0-0902f4377230" (UID: "fab9c443-9e80-4943-bfa0-0902f4377230"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.634284 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fab9c443-9e80-4943-bfa0-0902f4377230-config" (OuterVolumeSpecName: "config") pod "fab9c443-9e80-4943-bfa0-0902f4377230" (UID: "fab9c443-9e80-4943-bfa0-0902f4377230"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.636391 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fab9c443-9e80-4943-bfa0-0902f4377230-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "fab9c443-9e80-4943-bfa0-0902f4377230" (UID: "fab9c443-9e80-4943-bfa0-0902f4377230"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.637543 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fab9c443-9e80-4943-bfa0-0902f4377230-kube-api-access-5585p" (OuterVolumeSpecName: "kube-api-access-5585p") pod "fab9c443-9e80-4943-bfa0-0902f4377230" (UID: "fab9c443-9e80-4943-bfa0-0902f4377230"). InnerVolumeSpecName "kube-api-access-5585p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.734459 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5585p\" (UniqueName: \"kubernetes.io/projected/fab9c443-9e80-4943-bfa0-0902f4377230-kube-api-access-5585p\") on node \"crc\" DevicePath \"\"" Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.734502 4956 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fab9c443-9e80-4943-bfa0-0902f4377230-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.734513 4956 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fab9c443-9e80-4943-bfa0-0902f4377230-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:52:49 crc kubenswrapper[4956]: I1211 21:52:49.734547 4956 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fab9c443-9e80-4943-bfa0-0902f4377230-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.026845 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0190a0a5-2358-4044-b766-f164e0124dab" path="/var/lib/kubelet/pods/0190a0a5-2358-4044-b766-f164e0124dab/volumes" Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.472021 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d" event={"ID":"fab9c443-9e80-4943-bfa0-0902f4377230","Type":"ContainerDied","Data":"57d97e71eb3e12c4f6e045eca90dc55e551a22a750f68870c2085b8ca0226557"} Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.472086 4956 scope.go:117] "RemoveContainer" containerID="f7e4ab5e07465cabeac5e8a677a0da486086d8bf4942ad724ecdc45194c7fdbe" Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.472199 4956 util.go:48] "No ready sandbox for pod can be found. 
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.489452 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d"]
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.492775 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-nx88d"]
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.920057 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm"]
Dec 11 21:52:50 crc kubenswrapper[4956]: E1211 21:52:50.920552 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.920579 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 11 21:52:50 crc kubenswrapper[4956]: E1211 21:52:50.920611 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0190a0a5-2358-4044-b766-f164e0124dab" containerName="controller-manager"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.920621 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="0190a0a5-2358-4044-b766-f164e0124dab" containerName="controller-manager"
Dec 11 21:52:50 crc kubenswrapper[4956]: E1211 21:52:50.920631 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fab9c443-9e80-4943-bfa0-0902f4377230" containerName="route-controller-manager"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.920644 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="fab9c443-9e80-4943-bfa0-0902f4377230" containerName="route-controller-manager"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.920823 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="0190a0a5-2358-4044-b766-f164e0124dab" containerName="controller-manager"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.920849 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="fab9c443-9e80-4943-bfa0-0902f4377230" containerName="route-controller-manager"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.920859 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.921746 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.923951 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-84c944ffb4-prcbm"]
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.925095 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-84c944ffb4-prcbm"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.929526 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.929733 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.930156 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.930327 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.930366 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.932007 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.932834 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.933211 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.933334 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.933434 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.933532 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.933537 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.938736 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.946216 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm"]
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.947247 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4-client-ca\") pod \"controller-manager-84c944ffb4-prcbm\" (UID: \"c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4\") " pod="openshift-controller-manager/controller-manager-84c944ffb4-prcbm"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.947303 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4-serving-cert\") pod \"controller-manager-84c944ffb4-prcbm\" (UID: \"c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4\") " pod="openshift-controller-manager/controller-manager-84c944ffb4-prcbm"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.947344 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/43f60da7-5fbf-407d-bef1-88d1738e09e4-client-ca\") pod \"route-controller-manager-84b96c464b-fdqxm\" (UID: \"43f60da7-5fbf-407d-bef1-88d1738e09e4\") " pod="openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.947372 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/43f60da7-5fbf-407d-bef1-88d1738e09e4-serving-cert\") pod \"route-controller-manager-84b96c464b-fdqxm\" (UID: \"43f60da7-5fbf-407d-bef1-88d1738e09e4\") " pod="openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.947440 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7tm2\" (UniqueName: \"kubernetes.io/projected/43f60da7-5fbf-407d-bef1-88d1738e09e4-kube-api-access-g7tm2\") pod \"route-controller-manager-84b96c464b-fdqxm\" (UID: \"43f60da7-5fbf-407d-bef1-88d1738e09e4\") " pod="openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.947486 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43f60da7-5fbf-407d-bef1-88d1738e09e4-config\") pod \"route-controller-manager-84b96c464b-fdqxm\" (UID: \"43f60da7-5fbf-407d-bef1-88d1738e09e4\") " pod="openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.947510 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4-config\") pod \"controller-manager-84c944ffb4-prcbm\" (UID: \"c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4\") " pod="openshift-controller-manager/controller-manager-84c944ffb4-prcbm"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.947538 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4-proxy-ca-bundles\") pod \"controller-manager-84c944ffb4-prcbm\" (UID: \"c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4\") " pod="openshift-controller-manager/controller-manager-84c944ffb4-prcbm"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.947567 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpkgc\" (UniqueName: \"kubernetes.io/projected/c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4-kube-api-access-gpkgc\") pod \"controller-manager-84c944ffb4-prcbm\" (UID: \"c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4\") " pod="openshift-controller-manager/controller-manager-84c944ffb4-prcbm"
Dec 11 21:52:50 crc kubenswrapper[4956]: I1211 21:52:50.953197 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-84c944ffb4-prcbm"]
Dec 11 21:52:51 crc kubenswrapper[4956]: I1211 21:52:51.048608 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7tm2\" (UniqueName: \"kubernetes.io/projected/43f60da7-5fbf-407d-bef1-88d1738e09e4-kube-api-access-g7tm2\") pod \"route-controller-manager-84b96c464b-fdqxm\" (UID: \"43f60da7-5fbf-407d-bef1-88d1738e09e4\") " pod="openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm"
Dec 11 21:52:51 crc kubenswrapper[4956]: I1211 21:52:51.048664 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43f60da7-5fbf-407d-bef1-88d1738e09e4-config\") pod \"route-controller-manager-84b96c464b-fdqxm\" (UID: \"43f60da7-5fbf-407d-bef1-88d1738e09e4\") " pod="openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm"
Dec 11 21:52:51 crc kubenswrapper[4956]: I1211 21:52:51.048689 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4-config\") pod \"controller-manager-84c944ffb4-prcbm\" (UID: \"c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4\") " pod="openshift-controller-manager/controller-manager-84c944ffb4-prcbm"
Dec 11 21:52:51 crc kubenswrapper[4956]: I1211 21:52:51.048710 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4-proxy-ca-bundles\") pod \"controller-manager-84c944ffb4-prcbm\" (UID: \"c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4\") " pod="openshift-controller-manager/controller-manager-84c944ffb4-prcbm"
Dec 11 21:52:51 crc kubenswrapper[4956]: I1211 21:52:51.048729 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpkgc\" (UniqueName: \"kubernetes.io/projected/c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4-kube-api-access-gpkgc\") pod \"controller-manager-84c944ffb4-prcbm\" (UID: \"c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4\") " pod="openshift-controller-manager/controller-manager-84c944ffb4-prcbm"
Dec 11 21:52:51 crc kubenswrapper[4956]: I1211 21:52:51.048804 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4-client-ca\") pod \"controller-manager-84c944ffb4-prcbm\" (UID: \"c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4\") " pod="openshift-controller-manager/controller-manager-84c944ffb4-prcbm"
Dec 11 21:52:51 crc kubenswrapper[4956]: I1211 21:52:51.048839 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4-serving-cert\") pod \"controller-manager-84c944ffb4-prcbm\" (UID: \"c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4\") " pod="openshift-controller-manager/controller-manager-84c944ffb4-prcbm"
Dec 11 21:52:51 crc kubenswrapper[4956]: I1211 21:52:51.048881 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/43f60da7-5fbf-407d-bef1-88d1738e09e4-client-ca\") pod \"route-controller-manager-84b96c464b-fdqxm\" (UID: \"43f60da7-5fbf-407d-bef1-88d1738e09e4\") " pod="openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm"
Dec 11 21:52:51 crc kubenswrapper[4956]: I1211 21:52:51.048905 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/43f60da7-5fbf-407d-bef1-88d1738e09e4-serving-cert\") pod \"route-controller-manager-84b96c464b-fdqxm\" (UID: \"43f60da7-5fbf-407d-bef1-88d1738e09e4\") " pod="openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm"
Dec 11 21:52:51 crc kubenswrapper[4956]: I1211 21:52:51.050530 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4-proxy-ca-bundles\") pod \"controller-manager-84c944ffb4-prcbm\" (UID: \"c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4\") " pod="openshift-controller-manager/controller-manager-84c944ffb4-prcbm"
Dec 11 21:52:51 crc kubenswrapper[4956]: I1211 21:52:51.050609 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4-config\") pod \"controller-manager-84c944ffb4-prcbm\" (UID: \"c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4\") " pod="openshift-controller-manager/controller-manager-84c944ffb4-prcbm"
Dec 11 21:52:51 crc kubenswrapper[4956]: I1211 21:52:51.050869 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43f60da7-5fbf-407d-bef1-88d1738e09e4-config\") pod \"route-controller-manager-84b96c464b-fdqxm\" (UID: \"43f60da7-5fbf-407d-bef1-88d1738e09e4\") " pod="openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm"
Dec 11 21:52:51 crc kubenswrapper[4956]: I1211 21:52:51.051894 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/43f60da7-5fbf-407d-bef1-88d1738e09e4-client-ca\") pod \"route-controller-manager-84b96c464b-fdqxm\" (UID: \"43f60da7-5fbf-407d-bef1-88d1738e09e4\") " pod="openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm"
Dec 11 21:52:51 crc kubenswrapper[4956]: I1211 21:52:51.052080 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4-client-ca\") pod \"controller-manager-84c944ffb4-prcbm\" (UID: \"c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4\") " pod="openshift-controller-manager/controller-manager-84c944ffb4-prcbm"
Dec 11 21:52:51 crc kubenswrapper[4956]: I1211 21:52:51.059701 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4-serving-cert\") pod \"controller-manager-84c944ffb4-prcbm\" (UID: \"c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4\") " pod="openshift-controller-manager/controller-manager-84c944ffb4-prcbm"
Dec 11 21:52:51 crc kubenswrapper[4956]: I1211 21:52:51.067084 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/43f60da7-5fbf-407d-bef1-88d1738e09e4-serving-cert\") pod \"route-controller-manager-84b96c464b-fdqxm\" (UID: \"43f60da7-5fbf-407d-bef1-88d1738e09e4\") " pod="openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm"
Dec 11 21:52:51 crc kubenswrapper[4956]: I1211 21:52:51.072533 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7tm2\" (UniqueName: \"kubernetes.io/projected/43f60da7-5fbf-407d-bef1-88d1738e09e4-kube-api-access-g7tm2\") pod \"route-controller-manager-84b96c464b-fdqxm\" (UID: \"43f60da7-5fbf-407d-bef1-88d1738e09e4\") " pod="openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm"
Dec 11 21:52:51 crc
kubenswrapper[4956]: I1211 21:52:51.077345 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpkgc\" (UniqueName: \"kubernetes.io/projected/c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4-kube-api-access-gpkgc\") pod \"controller-manager-84c944ffb4-prcbm\" (UID: \"c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4\") " pod="openshift-controller-manager/controller-manager-84c944ffb4-prcbm" Dec 11 21:52:51 crc kubenswrapper[4956]: I1211 21:52:51.244631 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm" Dec 11 21:52:51 crc kubenswrapper[4956]: I1211 21:52:51.262108 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-84c944ffb4-prcbm" Dec 11 21:52:51 crc kubenswrapper[4956]: I1211 21:52:51.445254 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-84c944ffb4-prcbm"] Dec 11 21:52:51 crc kubenswrapper[4956]: I1211 21:52:51.479887 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-84c944ffb4-prcbm" event={"ID":"c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4","Type":"ContainerStarted","Data":"5d77e14d3846c688ff77d56dff7cae64811b87036bad7cf8a6d6993acba90873"} Dec 11 21:52:51 crc kubenswrapper[4956]: I1211 21:52:51.489266 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm"] Dec 11 21:52:51 crc kubenswrapper[4956]: W1211 21:52:51.497644 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod43f60da7_5fbf_407d_bef1_88d1738e09e4.slice/crio-003e661f2aa6b250bb7b199df84a9f5b9b0044a37371d9ae7d92c6b0a2b290d7 WatchSource:0}: Error finding container 003e661f2aa6b250bb7b199df84a9f5b9b0044a37371d9ae7d92c6b0a2b290d7: Status 404 returned error can't find the container with id 003e661f2aa6b250bb7b199df84a9f5b9b0044a37371d9ae7d92c6b0a2b290d7 Dec 11 21:52:52 crc kubenswrapper[4956]: I1211 21:52:52.030097 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fab9c443-9e80-4943-bfa0-0902f4377230" path="/var/lib/kubelet/pods/fab9c443-9e80-4943-bfa0-0902f4377230/volumes" Dec 11 21:52:52 crc kubenswrapper[4956]: I1211 21:52:52.486167 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-84c944ffb4-prcbm" event={"ID":"c4a49da0-1bdd-40bb-ab61-d0582ff1d9e4","Type":"ContainerStarted","Data":"f6c2a1792c7d94243553ac1bc4356cfef1b67a0c8bdd6327c0f586c5e2c84664"} Dec 11 21:52:52 crc kubenswrapper[4956]: I1211 21:52:52.487877 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-84c944ffb4-prcbm" Dec 11 21:52:52 crc kubenswrapper[4956]: I1211 21:52:52.489158 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm" event={"ID":"43f60da7-5fbf-407d-bef1-88d1738e09e4","Type":"ContainerStarted","Data":"699676829e8cde6087cfe20fb55d825f26a7f908c044dbf5e53ea25b450748a2"} Dec 11 21:52:52 crc kubenswrapper[4956]: I1211 21:52:52.489294 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm" 
event={"ID":"43f60da7-5fbf-407d-bef1-88d1738e09e4","Type":"ContainerStarted","Data":"003e661f2aa6b250bb7b199df84a9f5b9b0044a37371d9ae7d92c6b0a2b290d7"} Dec 11 21:52:52 crc kubenswrapper[4956]: I1211 21:52:52.489392 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm" Dec 11 21:52:52 crc kubenswrapper[4956]: I1211 21:52:52.491910 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-84c944ffb4-prcbm" Dec 11 21:52:52 crc kubenswrapper[4956]: I1211 21:52:52.494648 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm" Dec 11 21:52:52 crc kubenswrapper[4956]: I1211 21:52:52.503852 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-84c944ffb4-prcbm" podStartSLOduration=3.503835531 podStartE2EDuration="3.503835531s" podCreationTimestamp="2025-12-11 21:52:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:52:52.5007206 +0000 UTC m=+264.945098750" watchObservedRunningTime="2025-12-11 21:52:52.503835531 +0000 UTC m=+264.948213681" Dec 11 21:52:52 crc kubenswrapper[4956]: I1211 21:52:52.520994 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm" podStartSLOduration=3.52097896 podStartE2EDuration="3.52097896s" podCreationTimestamp="2025-12-11 21:52:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:52:52.518111166 +0000 UTC m=+264.962489326" watchObservedRunningTime="2025-12-11 21:52:52.52097896 +0000 UTC m=+264.965357110" Dec 11 21:53:06 crc kubenswrapper[4956]: I1211 21:53:06.800696 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tvxwj"] Dec 11 21:53:06 crc kubenswrapper[4956]: I1211 21:53:06.802726 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tvxwj" podUID="e87ecc79-efd6-4f8c-859b-4c527eaf0225" containerName="registry-server" containerID="cri-o://dfefaaf566cfcc646526855f975120112b68e499ea8c501017b68f6d1fac8eb7" gracePeriod=2 Dec 11 21:53:07 crc kubenswrapper[4956]: I1211 21:53:07.727573 4956 generic.go:334] "Generic (PLEG): container finished" podID="e87ecc79-efd6-4f8c-859b-4c527eaf0225" containerID="dfefaaf566cfcc646526855f975120112b68e499ea8c501017b68f6d1fac8eb7" exitCode=0 Dec 11 21:53:07 crc kubenswrapper[4956]: I1211 21:53:07.727758 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tvxwj" event={"ID":"e87ecc79-efd6-4f8c-859b-4c527eaf0225","Type":"ContainerDied","Data":"dfefaaf566cfcc646526855f975120112b68e499ea8c501017b68f6d1fac8eb7"} Dec 11 21:53:07 crc kubenswrapper[4956]: I1211 21:53:07.820079 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tvxwj" Dec 11 21:53:07 crc kubenswrapper[4956]: I1211 21:53:07.926269 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5g7hp\" (UniqueName: \"kubernetes.io/projected/e87ecc79-efd6-4f8c-859b-4c527eaf0225-kube-api-access-5g7hp\") pod \"e87ecc79-efd6-4f8c-859b-4c527eaf0225\" (UID: \"e87ecc79-efd6-4f8c-859b-4c527eaf0225\") " Dec 11 21:53:07 crc kubenswrapper[4956]: I1211 21:53:07.926480 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e87ecc79-efd6-4f8c-859b-4c527eaf0225-utilities\") pod \"e87ecc79-efd6-4f8c-859b-4c527eaf0225\" (UID: \"e87ecc79-efd6-4f8c-859b-4c527eaf0225\") " Dec 11 21:53:07 crc kubenswrapper[4956]: I1211 21:53:07.926548 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e87ecc79-efd6-4f8c-859b-4c527eaf0225-catalog-content\") pod \"e87ecc79-efd6-4f8c-859b-4c527eaf0225\" (UID: \"e87ecc79-efd6-4f8c-859b-4c527eaf0225\") " Dec 11 21:53:07 crc kubenswrapper[4956]: I1211 21:53:07.928648 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e87ecc79-efd6-4f8c-859b-4c527eaf0225-utilities" (OuterVolumeSpecName: "utilities") pod "e87ecc79-efd6-4f8c-859b-4c527eaf0225" (UID: "e87ecc79-efd6-4f8c-859b-4c527eaf0225"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 21:53:07 crc kubenswrapper[4956]: I1211 21:53:07.933974 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e87ecc79-efd6-4f8c-859b-4c527eaf0225-kube-api-access-5g7hp" (OuterVolumeSpecName: "kube-api-access-5g7hp") pod "e87ecc79-efd6-4f8c-859b-4c527eaf0225" (UID: "e87ecc79-efd6-4f8c-859b-4c527eaf0225"). InnerVolumeSpecName "kube-api-access-5g7hp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:53:08 crc kubenswrapper[4956]: I1211 21:53:08.028286 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e87ecc79-efd6-4f8c-859b-4c527eaf0225-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 21:53:08 crc kubenswrapper[4956]: I1211 21:53:08.028327 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5g7hp\" (UniqueName: \"kubernetes.io/projected/e87ecc79-efd6-4f8c-859b-4c527eaf0225-kube-api-access-5g7hp\") on node \"crc\" DevicePath \"\"" Dec 11 21:53:08 crc kubenswrapper[4956]: I1211 21:53:08.061852 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e87ecc79-efd6-4f8c-859b-4c527eaf0225-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e87ecc79-efd6-4f8c-859b-4c527eaf0225" (UID: "e87ecc79-efd6-4f8c-859b-4c527eaf0225"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 21:53:08 crc kubenswrapper[4956]: I1211 21:53:08.130324 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e87ecc79-efd6-4f8c-859b-4c527eaf0225-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 21:53:08 crc kubenswrapper[4956]: I1211 21:53:08.735352 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tvxwj" event={"ID":"e87ecc79-efd6-4f8c-859b-4c527eaf0225","Type":"ContainerDied","Data":"502c0e335943cddd990ced13d0fd1fcf2560e5b770ac06bdf70e73f8ce3278df"} Dec 11 21:53:08 crc kubenswrapper[4956]: I1211 21:53:08.735702 4956 scope.go:117] "RemoveContainer" containerID="dfefaaf566cfcc646526855f975120112b68e499ea8c501017b68f6d1fac8eb7" Dec 11 21:53:08 crc kubenswrapper[4956]: I1211 21:53:08.735561 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tvxwj" Dec 11 21:53:08 crc kubenswrapper[4956]: I1211 21:53:08.768903 4956 scope.go:117] "RemoveContainer" containerID="c52d39074cc0c6605380b7da7ef7f02a6e38b53f91ce465ef1f381052af80017" Dec 11 21:53:08 crc kubenswrapper[4956]: I1211 21:53:08.769054 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tvxwj"] Dec 11 21:53:08 crc kubenswrapper[4956]: I1211 21:53:08.774167 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tvxwj"] Dec 11 21:53:08 crc kubenswrapper[4956]: I1211 21:53:08.788052 4956 scope.go:117] "RemoveContainer" containerID="35cdbe5f4762548d1b2e05b47565ec6a520d078d9f107245e29291c3bb11eb01" Dec 11 21:53:09 crc kubenswrapper[4956]: I1211 21:53:09.006130 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm"] Dec 11 21:53:09 crc kubenswrapper[4956]: I1211 21:53:09.006425 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm" podUID="43f60da7-5fbf-407d-bef1-88d1738e09e4" containerName="route-controller-manager" containerID="cri-o://699676829e8cde6087cfe20fb55d825f26a7f908c044dbf5e53ea25b450748a2" gracePeriod=30 Dec 11 21:53:09 crc kubenswrapper[4956]: I1211 21:53:09.470391 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm" Dec 11 21:53:09 crc kubenswrapper[4956]: I1211 21:53:09.547491 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g7tm2\" (UniqueName: \"kubernetes.io/projected/43f60da7-5fbf-407d-bef1-88d1738e09e4-kube-api-access-g7tm2\") pod \"43f60da7-5fbf-407d-bef1-88d1738e09e4\" (UID: \"43f60da7-5fbf-407d-bef1-88d1738e09e4\") " Dec 11 21:53:09 crc kubenswrapper[4956]: I1211 21:53:09.547860 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/43f60da7-5fbf-407d-bef1-88d1738e09e4-client-ca\") pod \"43f60da7-5fbf-407d-bef1-88d1738e09e4\" (UID: \"43f60da7-5fbf-407d-bef1-88d1738e09e4\") " Dec 11 21:53:09 crc kubenswrapper[4956]: I1211 21:53:09.547916 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/43f60da7-5fbf-407d-bef1-88d1738e09e4-serving-cert\") pod \"43f60da7-5fbf-407d-bef1-88d1738e09e4\" (UID: \"43f60da7-5fbf-407d-bef1-88d1738e09e4\") " Dec 11 21:53:09 crc kubenswrapper[4956]: I1211 21:53:09.547938 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43f60da7-5fbf-407d-bef1-88d1738e09e4-config\") pod \"43f60da7-5fbf-407d-bef1-88d1738e09e4\" (UID: \"43f60da7-5fbf-407d-bef1-88d1738e09e4\") " Dec 11 21:53:09 crc kubenswrapper[4956]: I1211 21:53:09.548537 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43f60da7-5fbf-407d-bef1-88d1738e09e4-client-ca" (OuterVolumeSpecName: "client-ca") pod "43f60da7-5fbf-407d-bef1-88d1738e09e4" (UID: "43f60da7-5fbf-407d-bef1-88d1738e09e4"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:53:09 crc kubenswrapper[4956]: I1211 21:53:09.548573 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43f60da7-5fbf-407d-bef1-88d1738e09e4-config" (OuterVolumeSpecName: "config") pod "43f60da7-5fbf-407d-bef1-88d1738e09e4" (UID: "43f60da7-5fbf-407d-bef1-88d1738e09e4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:53:09 crc kubenswrapper[4956]: I1211 21:53:09.552979 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43f60da7-5fbf-407d-bef1-88d1738e09e4-kube-api-access-g7tm2" (OuterVolumeSpecName: "kube-api-access-g7tm2") pod "43f60da7-5fbf-407d-bef1-88d1738e09e4" (UID: "43f60da7-5fbf-407d-bef1-88d1738e09e4"). InnerVolumeSpecName "kube-api-access-g7tm2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:53:09 crc kubenswrapper[4956]: I1211 21:53:09.553058 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43f60da7-5fbf-407d-bef1-88d1738e09e4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "43f60da7-5fbf-407d-bef1-88d1738e09e4" (UID: "43f60da7-5fbf-407d-bef1-88d1738e09e4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:53:09 crc kubenswrapper[4956]: I1211 21:53:09.649348 4956 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/43f60da7-5fbf-407d-bef1-88d1738e09e4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 21:53:09 crc kubenswrapper[4956]: I1211 21:53:09.649380 4956 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43f60da7-5fbf-407d-bef1-88d1738e09e4-config\") on node \"crc\" DevicePath \"\"" Dec 11 21:53:09 crc kubenswrapper[4956]: I1211 21:53:09.649391 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g7tm2\" (UniqueName: \"kubernetes.io/projected/43f60da7-5fbf-407d-bef1-88d1738e09e4-kube-api-access-g7tm2\") on node \"crc\" DevicePath \"\"" Dec 11 21:53:09 crc kubenswrapper[4956]: I1211 21:53:09.649399 4956 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/43f60da7-5fbf-407d-bef1-88d1738e09e4-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 21:53:09 crc kubenswrapper[4956]: I1211 21:53:09.741138 4956 generic.go:334] "Generic (PLEG): container finished" podID="43f60da7-5fbf-407d-bef1-88d1738e09e4" containerID="699676829e8cde6087cfe20fb55d825f26a7f908c044dbf5e53ea25b450748a2" exitCode=0 Dec 11 21:53:09 crc kubenswrapper[4956]: I1211 21:53:09.741217 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm" event={"ID":"43f60da7-5fbf-407d-bef1-88d1738e09e4","Type":"ContainerDied","Data":"699676829e8cde6087cfe20fb55d825f26a7f908c044dbf5e53ea25b450748a2"} Dec 11 21:53:09 crc kubenswrapper[4956]: I1211 21:53:09.741247 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm" event={"ID":"43f60da7-5fbf-407d-bef1-88d1738e09e4","Type":"ContainerDied","Data":"003e661f2aa6b250bb7b199df84a9f5b9b0044a37371d9ae7d92c6b0a2b290d7"} Dec 11 21:53:09 crc kubenswrapper[4956]: I1211 21:53:09.741245 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm" Dec 11 21:53:09 crc kubenswrapper[4956]: I1211 21:53:09.741269 4956 scope.go:117] "RemoveContainer" containerID="699676829e8cde6087cfe20fb55d825f26a7f908c044dbf5e53ea25b450748a2" Dec 11 21:53:09 crc kubenswrapper[4956]: I1211 21:53:09.759120 4956 scope.go:117] "RemoveContainer" containerID="699676829e8cde6087cfe20fb55d825f26a7f908c044dbf5e53ea25b450748a2" Dec 11 21:53:09 crc kubenswrapper[4956]: E1211 21:53:09.759701 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"699676829e8cde6087cfe20fb55d825f26a7f908c044dbf5e53ea25b450748a2\": container with ID starting with 699676829e8cde6087cfe20fb55d825f26a7f908c044dbf5e53ea25b450748a2 not found: ID does not exist" containerID="699676829e8cde6087cfe20fb55d825f26a7f908c044dbf5e53ea25b450748a2" Dec 11 21:53:09 crc kubenswrapper[4956]: I1211 21:53:09.759744 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"699676829e8cde6087cfe20fb55d825f26a7f908c044dbf5e53ea25b450748a2"} err="failed to get container status \"699676829e8cde6087cfe20fb55d825f26a7f908c044dbf5e53ea25b450748a2\": rpc error: code = NotFound desc = could not find container \"699676829e8cde6087cfe20fb55d825f26a7f908c044dbf5e53ea25b450748a2\": container with ID starting with 699676829e8cde6087cfe20fb55d825f26a7f908c044dbf5e53ea25b450748a2 not found: ID does not exist" Dec 11 21:53:09 crc kubenswrapper[4956]: I1211 21:53:09.779194 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm"] Dec 11 21:53:09 crc kubenswrapper[4956]: I1211 21:53:09.785853 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-84b96c464b-fdqxm"] Dec 11 21:53:10 crc kubenswrapper[4956]: I1211 21:53:10.029153 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43f60da7-5fbf-407d-bef1-88d1738e09e4" path="/var/lib/kubelet/pods/43f60da7-5fbf-407d-bef1-88d1738e09e4/volumes" Dec 11 21:53:10 crc kubenswrapper[4956]: I1211 21:53:10.029786 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e87ecc79-efd6-4f8c-859b-4c527eaf0225" path="/var/lib/kubelet/pods/e87ecc79-efd6-4f8c-859b-4c527eaf0225/volumes" Dec 11 21:53:10 crc kubenswrapper[4956]: I1211 21:53:10.938405 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74dbcf9cf5-vqjz7"] Dec 11 21:53:10 crc kubenswrapper[4956]: E1211 21:53:10.938699 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e87ecc79-efd6-4f8c-859b-4c527eaf0225" containerName="extract-content" Dec 11 21:53:10 crc kubenswrapper[4956]: I1211 21:53:10.938718 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="e87ecc79-efd6-4f8c-859b-4c527eaf0225" containerName="extract-content" Dec 11 21:53:10 crc kubenswrapper[4956]: E1211 21:53:10.938734 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43f60da7-5fbf-407d-bef1-88d1738e09e4" containerName="route-controller-manager" Dec 11 21:53:10 crc kubenswrapper[4956]: I1211 21:53:10.938744 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="43f60da7-5fbf-407d-bef1-88d1738e09e4" containerName="route-controller-manager" Dec 11 21:53:10 crc kubenswrapper[4956]: E1211 21:53:10.938765 4956 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="e87ecc79-efd6-4f8c-859b-4c527eaf0225" containerName="extract-utilities" Dec 11 21:53:10 crc kubenswrapper[4956]: I1211 21:53:10.938802 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="e87ecc79-efd6-4f8c-859b-4c527eaf0225" containerName="extract-utilities" Dec 11 21:53:10 crc kubenswrapper[4956]: E1211 21:53:10.938822 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e87ecc79-efd6-4f8c-859b-4c527eaf0225" containerName="registry-server" Dec 11 21:53:10 crc kubenswrapper[4956]: I1211 21:53:10.938831 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="e87ecc79-efd6-4f8c-859b-4c527eaf0225" containerName="registry-server" Dec 11 21:53:10 crc kubenswrapper[4956]: I1211 21:53:10.938996 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="43f60da7-5fbf-407d-bef1-88d1738e09e4" containerName="route-controller-manager" Dec 11 21:53:10 crc kubenswrapper[4956]: I1211 21:53:10.939020 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="e87ecc79-efd6-4f8c-859b-4c527eaf0225" containerName="registry-server" Dec 11 21:53:10 crc kubenswrapper[4956]: I1211 21:53:10.939424 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-74dbcf9cf5-vqjz7" Dec 11 21:53:10 crc kubenswrapper[4956]: I1211 21:53:10.941887 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 11 21:53:10 crc kubenswrapper[4956]: I1211 21:53:10.942030 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 11 21:53:10 crc kubenswrapper[4956]: I1211 21:53:10.944274 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 11 21:53:10 crc kubenswrapper[4956]: I1211 21:53:10.944567 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 11 21:53:10 crc kubenswrapper[4956]: I1211 21:53:10.947435 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74dbcf9cf5-vqjz7"] Dec 11 21:53:10 crc kubenswrapper[4956]: I1211 21:53:10.949110 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 11 21:53:10 crc kubenswrapper[4956]: I1211 21:53:10.949120 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 11 21:53:10 crc kubenswrapper[4956]: I1211 21:53:10.967665 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9012bb9d-b22a-4fa0-9b10-002e25d6f6da-serving-cert\") pod \"route-controller-manager-74dbcf9cf5-vqjz7\" (UID: \"9012bb9d-b22a-4fa0-9b10-002e25d6f6da\") " pod="openshift-route-controller-manager/route-controller-manager-74dbcf9cf5-vqjz7" Dec 11 21:53:10 crc kubenswrapper[4956]: I1211 21:53:10.967740 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9012bb9d-b22a-4fa0-9b10-002e25d6f6da-config\") pod \"route-controller-manager-74dbcf9cf5-vqjz7\" (UID: \"9012bb9d-b22a-4fa0-9b10-002e25d6f6da\") " 
pod="openshift-route-controller-manager/route-controller-manager-74dbcf9cf5-vqjz7" Dec 11 21:53:10 crc kubenswrapper[4956]: I1211 21:53:10.967852 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6z2k\" (UniqueName: \"kubernetes.io/projected/9012bb9d-b22a-4fa0-9b10-002e25d6f6da-kube-api-access-r6z2k\") pod \"route-controller-manager-74dbcf9cf5-vqjz7\" (UID: \"9012bb9d-b22a-4fa0-9b10-002e25d6f6da\") " pod="openshift-route-controller-manager/route-controller-manager-74dbcf9cf5-vqjz7" Dec 11 21:53:10 crc kubenswrapper[4956]: I1211 21:53:10.967894 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9012bb9d-b22a-4fa0-9b10-002e25d6f6da-client-ca\") pod \"route-controller-manager-74dbcf9cf5-vqjz7\" (UID: \"9012bb9d-b22a-4fa0-9b10-002e25d6f6da\") " pod="openshift-route-controller-manager/route-controller-manager-74dbcf9cf5-vqjz7" Dec 11 21:53:11 crc kubenswrapper[4956]: I1211 21:53:11.095505 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6z2k\" (UniqueName: \"kubernetes.io/projected/9012bb9d-b22a-4fa0-9b10-002e25d6f6da-kube-api-access-r6z2k\") pod \"route-controller-manager-74dbcf9cf5-vqjz7\" (UID: \"9012bb9d-b22a-4fa0-9b10-002e25d6f6da\") " pod="openshift-route-controller-manager/route-controller-manager-74dbcf9cf5-vqjz7" Dec 11 21:53:11 crc kubenswrapper[4956]: I1211 21:53:11.095575 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9012bb9d-b22a-4fa0-9b10-002e25d6f6da-client-ca\") pod \"route-controller-manager-74dbcf9cf5-vqjz7\" (UID: \"9012bb9d-b22a-4fa0-9b10-002e25d6f6da\") " pod="openshift-route-controller-manager/route-controller-manager-74dbcf9cf5-vqjz7" Dec 11 21:53:11 crc kubenswrapper[4956]: I1211 21:53:11.096281 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9012bb9d-b22a-4fa0-9b10-002e25d6f6da-serving-cert\") pod \"route-controller-manager-74dbcf9cf5-vqjz7\" (UID: \"9012bb9d-b22a-4fa0-9b10-002e25d6f6da\") " pod="openshift-route-controller-manager/route-controller-manager-74dbcf9cf5-vqjz7" Dec 11 21:53:11 crc kubenswrapper[4956]: I1211 21:53:11.096376 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9012bb9d-b22a-4fa0-9b10-002e25d6f6da-config\") pod \"route-controller-manager-74dbcf9cf5-vqjz7\" (UID: \"9012bb9d-b22a-4fa0-9b10-002e25d6f6da\") " pod="openshift-route-controller-manager/route-controller-manager-74dbcf9cf5-vqjz7" Dec 11 21:53:11 crc kubenswrapper[4956]: I1211 21:53:11.097341 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9012bb9d-b22a-4fa0-9b10-002e25d6f6da-client-ca\") pod \"route-controller-manager-74dbcf9cf5-vqjz7\" (UID: \"9012bb9d-b22a-4fa0-9b10-002e25d6f6da\") " pod="openshift-route-controller-manager/route-controller-manager-74dbcf9cf5-vqjz7" Dec 11 21:53:11 crc kubenswrapper[4956]: I1211 21:53:11.097934 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9012bb9d-b22a-4fa0-9b10-002e25d6f6da-config\") pod \"route-controller-manager-74dbcf9cf5-vqjz7\" (UID: \"9012bb9d-b22a-4fa0-9b10-002e25d6f6da\") " 
pod="openshift-route-controller-manager/route-controller-manager-74dbcf9cf5-vqjz7" Dec 11 21:53:11 crc kubenswrapper[4956]: I1211 21:53:11.103142 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9012bb9d-b22a-4fa0-9b10-002e25d6f6da-serving-cert\") pod \"route-controller-manager-74dbcf9cf5-vqjz7\" (UID: \"9012bb9d-b22a-4fa0-9b10-002e25d6f6da\") " pod="openshift-route-controller-manager/route-controller-manager-74dbcf9cf5-vqjz7" Dec 11 21:53:11 crc kubenswrapper[4956]: I1211 21:53:11.127731 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6z2k\" (UniqueName: \"kubernetes.io/projected/9012bb9d-b22a-4fa0-9b10-002e25d6f6da-kube-api-access-r6z2k\") pod \"route-controller-manager-74dbcf9cf5-vqjz7\" (UID: \"9012bb9d-b22a-4fa0-9b10-002e25d6f6da\") " pod="openshift-route-controller-manager/route-controller-manager-74dbcf9cf5-vqjz7" Dec 11 21:53:11 crc kubenswrapper[4956]: I1211 21:53:11.278081 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-74dbcf9cf5-vqjz7" Dec 11 21:53:11 crc kubenswrapper[4956]: I1211 21:53:11.693351 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74dbcf9cf5-vqjz7"] Dec 11 21:53:11 crc kubenswrapper[4956]: W1211 21:53:11.702469 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9012bb9d_b22a_4fa0_9b10_002e25d6f6da.slice/crio-3321a35272ae3fd91bf4773c696f0bce64f0bcca361538109457e568dde66c6c WatchSource:0}: Error finding container 3321a35272ae3fd91bf4773c696f0bce64f0bcca361538109457e568dde66c6c: Status 404 returned error can't find the container with id 3321a35272ae3fd91bf4773c696f0bce64f0bcca361538109457e568dde66c6c Dec 11 21:53:11 crc kubenswrapper[4956]: I1211 21:53:11.761037 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-74dbcf9cf5-vqjz7" event={"ID":"9012bb9d-b22a-4fa0-9b10-002e25d6f6da","Type":"ContainerStarted","Data":"3321a35272ae3fd91bf4773c696f0bce64f0bcca361538109457e568dde66c6c"} Dec 11 21:53:12 crc kubenswrapper[4956]: I1211 21:53:12.767983 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-74dbcf9cf5-vqjz7" event={"ID":"9012bb9d-b22a-4fa0-9b10-002e25d6f6da","Type":"ContainerStarted","Data":"4edf654b9d1811cf766936faa71d961384e22d6cb78a451738bf91f8e4244a93"} Dec 11 21:53:12 crc kubenswrapper[4956]: I1211 21:53:12.768370 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-74dbcf9cf5-vqjz7" Dec 11 21:53:12 crc kubenswrapper[4956]: I1211 21:53:12.777536 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-74dbcf9cf5-vqjz7" Dec 11 21:53:12 crc kubenswrapper[4956]: I1211 21:53:12.787414 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-74dbcf9cf5-vqjz7" podStartSLOduration=3.787391727 podStartE2EDuration="3.787391727s" podCreationTimestamp="2025-12-11 21:53:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:53:12.785960095 
+0000 UTC m=+285.230338255" watchObservedRunningTime="2025-12-11 21:53:12.787391727 +0000 UTC m=+285.231769887" Dec 11 21:53:13 crc kubenswrapper[4956]: I1211 21:53:13.928103 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-xmzw8"] Dec 11 21:53:13 crc kubenswrapper[4956]: I1211 21:53:13.928956 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:13 crc kubenswrapper[4956]: I1211 21:53:13.949902 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-xmzw8"] Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.043385 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/437b9b21-2c71-4bc8-b7d8-a64c49e92e13-installation-pull-secrets\") pod \"image-registry-66df7c8f76-xmzw8\" (UID: \"437b9b21-2c71-4bc8-b7d8-a64c49e92e13\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.043460 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-xmzw8\" (UID: \"437b9b21-2c71-4bc8-b7d8-a64c49e92e13\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.043505 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/437b9b21-2c71-4bc8-b7d8-a64c49e92e13-ca-trust-extracted\") pod \"image-registry-66df7c8f76-xmzw8\" (UID: \"437b9b21-2c71-4bc8-b7d8-a64c49e92e13\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.043536 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/437b9b21-2c71-4bc8-b7d8-a64c49e92e13-registry-tls\") pod \"image-registry-66df7c8f76-xmzw8\" (UID: \"437b9b21-2c71-4bc8-b7d8-a64c49e92e13\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.043565 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/437b9b21-2c71-4bc8-b7d8-a64c49e92e13-registry-certificates\") pod \"image-registry-66df7c8f76-xmzw8\" (UID: \"437b9b21-2c71-4bc8-b7d8-a64c49e92e13\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.043597 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/437b9b21-2c71-4bc8-b7d8-a64c49e92e13-trusted-ca\") pod \"image-registry-66df7c8f76-xmzw8\" (UID: \"437b9b21-2c71-4bc8-b7d8-a64c49e92e13\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.043616 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/437b9b21-2c71-4bc8-b7d8-a64c49e92e13-bound-sa-token\") pod \"image-registry-66df7c8f76-xmzw8\" (UID: \"437b9b21-2c71-4bc8-b7d8-a64c49e92e13\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.043644 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tzh4\" (UniqueName: \"kubernetes.io/projected/437b9b21-2c71-4bc8-b7d8-a64c49e92e13-kube-api-access-5tzh4\") pod \"image-registry-66df7c8f76-xmzw8\" (UID: \"437b9b21-2c71-4bc8-b7d8-a64c49e92e13\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.066135 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-xmzw8\" (UID: \"437b9b21-2c71-4bc8-b7d8-a64c49e92e13\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.145149 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/437b9b21-2c71-4bc8-b7d8-a64c49e92e13-registry-tls\") pod \"image-registry-66df7c8f76-xmzw8\" (UID: \"437b9b21-2c71-4bc8-b7d8-a64c49e92e13\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.145215 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/437b9b21-2c71-4bc8-b7d8-a64c49e92e13-registry-certificates\") pod \"image-registry-66df7c8f76-xmzw8\" (UID: \"437b9b21-2c71-4bc8-b7d8-a64c49e92e13\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.145251 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/437b9b21-2c71-4bc8-b7d8-a64c49e92e13-trusted-ca\") pod \"image-registry-66df7c8f76-xmzw8\" (UID: \"437b9b21-2c71-4bc8-b7d8-a64c49e92e13\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.145272 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/437b9b21-2c71-4bc8-b7d8-a64c49e92e13-bound-sa-token\") pod \"image-registry-66df7c8f76-xmzw8\" (UID: \"437b9b21-2c71-4bc8-b7d8-a64c49e92e13\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.145303 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tzh4\" (UniqueName: \"kubernetes.io/projected/437b9b21-2c71-4bc8-b7d8-a64c49e92e13-kube-api-access-5tzh4\") pod \"image-registry-66df7c8f76-xmzw8\" (UID: \"437b9b21-2c71-4bc8-b7d8-a64c49e92e13\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.145332 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/437b9b21-2c71-4bc8-b7d8-a64c49e92e13-installation-pull-secrets\") pod \"image-registry-66df7c8f76-xmzw8\" (UID: \"437b9b21-2c71-4bc8-b7d8-a64c49e92e13\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.145391 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/437b9b21-2c71-4bc8-b7d8-a64c49e92e13-ca-trust-extracted\") pod \"image-registry-66df7c8f76-xmzw8\" (UID: \"437b9b21-2c71-4bc8-b7d8-a64c49e92e13\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.145909 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/437b9b21-2c71-4bc8-b7d8-a64c49e92e13-ca-trust-extracted\") pod \"image-registry-66df7c8f76-xmzw8\" (UID: \"437b9b21-2c71-4bc8-b7d8-a64c49e92e13\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.146811 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/437b9b21-2c71-4bc8-b7d8-a64c49e92e13-registry-certificates\") pod \"image-registry-66df7c8f76-xmzw8\" (UID: \"437b9b21-2c71-4bc8-b7d8-a64c49e92e13\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.147356 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/437b9b21-2c71-4bc8-b7d8-a64c49e92e13-trusted-ca\") pod \"image-registry-66df7c8f76-xmzw8\" (UID: \"437b9b21-2c71-4bc8-b7d8-a64c49e92e13\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.150908 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/437b9b21-2c71-4bc8-b7d8-a64c49e92e13-installation-pull-secrets\") pod \"image-registry-66df7c8f76-xmzw8\" (UID: \"437b9b21-2c71-4bc8-b7d8-a64c49e92e13\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.151089 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/437b9b21-2c71-4bc8-b7d8-a64c49e92e13-registry-tls\") pod \"image-registry-66df7c8f76-xmzw8\" (UID: \"437b9b21-2c71-4bc8-b7d8-a64c49e92e13\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.167375 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/437b9b21-2c71-4bc8-b7d8-a64c49e92e13-bound-sa-token\") pod \"image-registry-66df7c8f76-xmzw8\" (UID: \"437b9b21-2c71-4bc8-b7d8-a64c49e92e13\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.175885 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tzh4\" (UniqueName: \"kubernetes.io/projected/437b9b21-2c71-4bc8-b7d8-a64c49e92e13-kube-api-access-5tzh4\") pod \"image-registry-66df7c8f76-xmzw8\" (UID: \"437b9b21-2c71-4bc8-b7d8-a64c49e92e13\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.246020 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.710329 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-xmzw8"] Dec 11 21:53:14 crc kubenswrapper[4956]: W1211 21:53:14.721920 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod437b9b21_2c71_4bc8_b7d8_a64c49e92e13.slice/crio-0bf3b1bdeed05a436a81aaed99f120c35465035352cc06920e918efc34736f3d WatchSource:0}: Error finding container 0bf3b1bdeed05a436a81aaed99f120c35465035352cc06920e918efc34736f3d: Status 404 returned error can't find the container with id 0bf3b1bdeed05a436a81aaed99f120c35465035352cc06920e918efc34736f3d Dec 11 21:53:14 crc kubenswrapper[4956]: I1211 21:53:14.780655 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" event={"ID":"437b9b21-2c71-4bc8-b7d8-a64c49e92e13","Type":"ContainerStarted","Data":"0bf3b1bdeed05a436a81aaed99f120c35465035352cc06920e918efc34736f3d"} Dec 11 21:53:15 crc kubenswrapper[4956]: I1211 21:53:15.791585 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" event={"ID":"437b9b21-2c71-4bc8-b7d8-a64c49e92e13","Type":"ContainerStarted","Data":"97b0085450fce1191f0ee502728e236047c825ceee4cc03a026e9088a28d8d0c"} Dec 11 21:53:15 crc kubenswrapper[4956]: I1211 21:53:15.793533 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:15 crc kubenswrapper[4956]: I1211 21:53:15.825690 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" podStartSLOduration=2.8256556 podStartE2EDuration="2.8256556s" podCreationTimestamp="2025-12-11 21:53:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:53:15.81949055 +0000 UTC m=+288.263868740" watchObservedRunningTime="2025-12-11 21:53:15.8256556 +0000 UTC m=+288.270033790" Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.599621 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bn9xr"] Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.600447 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-bn9xr" podUID="aa022a30-6487-45c0-82b7-336a05167918" containerName="registry-server" containerID="cri-o://5f5b3e7a1aeebd57d07d9847751f28ecaf9a69ffe563c58100158050e3ec5f74" gracePeriod=30 Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.618370 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l5286"] Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.618685 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-l5286" podUID="3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336" containerName="registry-server" containerID="cri-o://c3fed00e13fa0add010ec9560c6db643ca46e509c24b073f0fc87f79ed7cc9ef" gracePeriod=30 Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.631852 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-d8mkh"] Dec 11 21:53:23 crc 
kubenswrapper[4956]: I1211 21:53:23.632116 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-d8mkh" podUID="62802da2-70ad-46d2-bc51-b9bf3e0b6086" containerName="marketplace-operator" containerID="cri-o://5a24da353f835950292ac6c9b2d0d0d23f020db6ea2a92e9daf1e643fb3b3f28" gracePeriod=30
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.641744 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5khrp"]
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.642050 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5khrp" podUID="48520909-a6cd-4ec4-a6db-35a778505823" containerName="registry-server" containerID="cri-o://4716eb3ec77e80bbc9b606ab179098b203d857090cc60852eedd5d42d7ca493d" gracePeriod=30
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.648136 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-46xqx"]
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.648442 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-46xqx" podUID="46394f6a-9e6f-49f8-a879-1753789c4ba0" containerName="registry-server" containerID="cri-o://99678f6232902c257785529f2af5870913f4f166fde8ad1851a1faa18bc17b13" gracePeriod=30
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.657846 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-g7wp2"]
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.660171 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-g7wp2"
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.661200 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-g7wp2"]
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.679245 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6a02d05c-9762-43bf-8ab5-2e7a1f7695bc-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-g7wp2\" (UID: \"6a02d05c-9762-43bf-8ab5-2e7a1f7695bc\") " pod="openshift-marketplace/marketplace-operator-79b997595-g7wp2"
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.679316 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6a02d05c-9762-43bf-8ab5-2e7a1f7695bc-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-g7wp2\" (UID: \"6a02d05c-9762-43bf-8ab5-2e7a1f7695bc\") " pod="openshift-marketplace/marketplace-operator-79b997595-g7wp2"
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.679342 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r487n\" (UniqueName: \"kubernetes.io/projected/6a02d05c-9762-43bf-8ab5-2e7a1f7695bc-kube-api-access-r487n\") pod \"marketplace-operator-79b997595-g7wp2\" (UID: \"6a02d05c-9762-43bf-8ab5-2e7a1f7695bc\") " pod="openshift-marketplace/marketplace-operator-79b997595-g7wp2"
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.746537 4956 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-d8mkh container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/healthz\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body=
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.746830 4956 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-d8mkh" podUID="62802da2-70ad-46d2-bc51-b9bf3e0b6086" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.19:8080/healthz\": dial tcp 10.217.0.19:8080: connect: connection refused"
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.786448 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6a02d05c-9762-43bf-8ab5-2e7a1f7695bc-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-g7wp2\" (UID: \"6a02d05c-9762-43bf-8ab5-2e7a1f7695bc\") " pod="openshift-marketplace/marketplace-operator-79b997595-g7wp2"
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.786516 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6a02d05c-9762-43bf-8ab5-2e7a1f7695bc-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-g7wp2\" (UID: \"6a02d05c-9762-43bf-8ab5-2e7a1f7695bc\") " pod="openshift-marketplace/marketplace-operator-79b997595-g7wp2"
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.786532 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r487n\" (UniqueName: \"kubernetes.io/projected/6a02d05c-9762-43bf-8ab5-2e7a1f7695bc-kube-api-access-r487n\") pod \"marketplace-operator-79b997595-g7wp2\" (UID: \"6a02d05c-9762-43bf-8ab5-2e7a1f7695bc\") " pod="openshift-marketplace/marketplace-operator-79b997595-g7wp2"
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.788389 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6a02d05c-9762-43bf-8ab5-2e7a1f7695bc-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-g7wp2\" (UID: \"6a02d05c-9762-43bf-8ab5-2e7a1f7695bc\") " pod="openshift-marketplace/marketplace-operator-79b997595-g7wp2"
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.797922 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6a02d05c-9762-43bf-8ab5-2e7a1f7695bc-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-g7wp2\" (UID: \"6a02d05c-9762-43bf-8ab5-2e7a1f7695bc\") " pod="openshift-marketplace/marketplace-operator-79b997595-g7wp2"
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.806793 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r487n\" (UniqueName: \"kubernetes.io/projected/6a02d05c-9762-43bf-8ab5-2e7a1f7695bc-kube-api-access-r487n\") pod \"marketplace-operator-79b997595-g7wp2\" (UID: \"6a02d05c-9762-43bf-8ab5-2e7a1f7695bc\") " pod="openshift-marketplace/marketplace-operator-79b997595-g7wp2"
Dec 11 21:53:23 crc kubenswrapper[4956]: E1211 21:53:23.813659 4956 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4716eb3ec77e80bbc9b606ab179098b203d857090cc60852eedd5d42d7ca493d is running failed: container process not found" containerID="4716eb3ec77e80bbc9b606ab179098b203d857090cc60852eedd5d42d7ca493d" cmd=["grpc_health_probe","-addr=:50051"]
Dec 11 21:53:23 crc kubenswrapper[4956]: E1211 21:53:23.816814 4956 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4716eb3ec77e80bbc9b606ab179098b203d857090cc60852eedd5d42d7ca493d is running failed: container process not found" containerID="4716eb3ec77e80bbc9b606ab179098b203d857090cc60852eedd5d42d7ca493d" cmd=["grpc_health_probe","-addr=:50051"]
Dec 11 21:53:23 crc kubenswrapper[4956]: E1211 21:53:23.817723 4956 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4716eb3ec77e80bbc9b606ab179098b203d857090cc60852eedd5d42d7ca493d is running failed: container process not found" containerID="4716eb3ec77e80bbc9b606ab179098b203d857090cc60852eedd5d42d7ca493d" cmd=["grpc_health_probe","-addr=:50051"]
Dec 11 21:53:23 crc kubenswrapper[4956]: E1211 21:53:23.817784 4956 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4716eb3ec77e80bbc9b606ab179098b203d857090cc60852eedd5d42d7ca493d is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-marketplace-5khrp" podUID="48520909-a6cd-4ec4-a6db-35a778505823" containerName="registry-server"
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.877897 4956 generic.go:334] "Generic (PLEG): container finished" podID="62802da2-70ad-46d2-bc51-b9bf3e0b6086" containerID="5a24da353f835950292ac6c9b2d0d0d23f020db6ea2a92e9daf1e643fb3b3f28" exitCode=0
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.878000 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-d8mkh" event={"ID":"62802da2-70ad-46d2-bc51-b9bf3e0b6086","Type":"ContainerDied","Data":"5a24da353f835950292ac6c9b2d0d0d23f020db6ea2a92e9daf1e643fb3b3f28"}
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.882649 4956 generic.go:334] "Generic (PLEG): container finished" podID="46394f6a-9e6f-49f8-a879-1753789c4ba0" containerID="99678f6232902c257785529f2af5870913f4f166fde8ad1851a1faa18bc17b13" exitCode=0
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.882739 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-46xqx" event={"ID":"46394f6a-9e6f-49f8-a879-1753789c4ba0","Type":"ContainerDied","Data":"99678f6232902c257785529f2af5870913f4f166fde8ad1851a1faa18bc17b13"}
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.889801 4956 generic.go:334] "Generic (PLEG): container finished" podID="aa022a30-6487-45c0-82b7-336a05167918" containerID="5f5b3e7a1aeebd57d07d9847751f28ecaf9a69ffe563c58100158050e3ec5f74" exitCode=0
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.889876 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bn9xr" event={"ID":"aa022a30-6487-45c0-82b7-336a05167918","Type":"ContainerDied","Data":"5f5b3e7a1aeebd57d07d9847751f28ecaf9a69ffe563c58100158050e3ec5f74"}
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.897084 4956 generic.go:334] "Generic (PLEG): container finished" podID="48520909-a6cd-4ec4-a6db-35a778505823" containerID="4716eb3ec77e80bbc9b606ab179098b203d857090cc60852eedd5d42d7ca493d" exitCode=0
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.897173 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5khrp" event={"ID":"48520909-a6cd-4ec4-a6db-35a778505823","Type":"ContainerDied","Data":"4716eb3ec77e80bbc9b606ab179098b203d857090cc60852eedd5d42d7ca493d"}
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.904897 4956 generic.go:334] "Generic (PLEG): container finished" podID="3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336" containerID="c3fed00e13fa0add010ec9560c6db643ca46e509c24b073f0fc87f79ed7cc9ef" exitCode=0
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.904942 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l5286" event={"ID":"3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336","Type":"ContainerDied","Data":"c3fed00e13fa0add010ec9560c6db643ca46e509c24b073f0fc87f79ed7cc9ef"}
Dec 11 21:53:23 crc kubenswrapper[4956]: I1211 21:53:23.985349 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-g7wp2"
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.116135 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l5286"
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.192008 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336-catalog-content\") pod \"3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336\" (UID: \"3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336\") "
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.192063 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336-utilities\") pod \"3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336\" (UID: \"3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336\") "
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.192128 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gscp5\" (UniqueName: \"kubernetes.io/projected/3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336-kube-api-access-gscp5\") pod \"3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336\" (UID: \"3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336\") "
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.195467 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336-utilities" (OuterVolumeSpecName: "utilities") pod "3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336" (UID: "3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.199034 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336-kube-api-access-gscp5" (OuterVolumeSpecName: "kube-api-access-gscp5") pod "3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336" (UID: "3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336"). InnerVolumeSpecName "kube-api-access-gscp5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.222506 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5khrp"
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.230098 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-d8mkh"
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.261980 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-46xqx"
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.293659 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/62802da2-70ad-46d2-bc51-b9bf3e0b6086-marketplace-operator-metrics\") pod \"62802da2-70ad-46d2-bc51-b9bf3e0b6086\" (UID: \"62802da2-70ad-46d2-bc51-b9bf3e0b6086\") "
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.293729 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48520909-a6cd-4ec4-a6db-35a778505823-catalog-content\") pod \"48520909-a6cd-4ec4-a6db-35a778505823\" (UID: \"48520909-a6cd-4ec4-a6db-35a778505823\") "
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.293757 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sw7z4\" (UniqueName: \"kubernetes.io/projected/62802da2-70ad-46d2-bc51-b9bf3e0b6086-kube-api-access-sw7z4\") pod \"62802da2-70ad-46d2-bc51-b9bf3e0b6086\" (UID: \"62802da2-70ad-46d2-bc51-b9bf3e0b6086\") "
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.293796 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/62802da2-70ad-46d2-bc51-b9bf3e0b6086-marketplace-trusted-ca\") pod \"62802da2-70ad-46d2-bc51-b9bf3e0b6086\" (UID: \"62802da2-70ad-46d2-bc51-b9bf3e0b6086\") "
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.293872 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xptp7\" (UniqueName: \"kubernetes.io/projected/46394f6a-9e6f-49f8-a879-1753789c4ba0-kube-api-access-xptp7\") pod \"46394f6a-9e6f-49f8-a879-1753789c4ba0\" (UID: \"46394f6a-9e6f-49f8-a879-1753789c4ba0\") "
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.293896 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46394f6a-9e6f-49f8-a879-1753789c4ba0-catalog-content\") pod \"46394f6a-9e6f-49f8-a879-1753789c4ba0\" (UID: \"46394f6a-9e6f-49f8-a879-1753789c4ba0\") "
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.293913 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48520909-a6cd-4ec4-a6db-35a778505823-utilities\") pod \"48520909-a6cd-4ec4-a6db-35a778505823\" (UID: \"48520909-a6cd-4ec4-a6db-35a778505823\") "
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.293932 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vmp9j\" (UniqueName: \"kubernetes.io/projected/48520909-a6cd-4ec4-a6db-35a778505823-kube-api-access-vmp9j\") pod \"48520909-a6cd-4ec4-a6db-35a778505823\" (UID: \"48520909-a6cd-4ec4-a6db-35a778505823\") "
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.293949 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46394f6a-9e6f-49f8-a879-1753789c4ba0-utilities\") pod \"46394f6a-9e6f-49f8-a879-1753789c4ba0\" (UID: \"46394f6a-9e6f-49f8-a879-1753789c4ba0\") "
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.294928 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62802da2-70ad-46d2-bc51-b9bf3e0b6086-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "62802da2-70ad-46d2-bc51-b9bf3e0b6086" (UID: "62802da2-70ad-46d2-bc51-b9bf3e0b6086"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.295365 4956 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/62802da2-70ad-46d2-bc51-b9bf3e0b6086-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.295384 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336-utilities\") on node \"crc\" DevicePath \"\""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.295396 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gscp5\" (UniqueName: \"kubernetes.io/projected/3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336-kube-api-access-gscp5\") on node \"crc\" DevicePath \"\""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.295627 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336" (UID: "3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.296152 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46394f6a-9e6f-49f8-a879-1753789c4ba0-utilities" (OuterVolumeSpecName: "utilities") pod "46394f6a-9e6f-49f8-a879-1753789c4ba0" (UID: "46394f6a-9e6f-49f8-a879-1753789c4ba0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.296531 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48520909-a6cd-4ec4-a6db-35a778505823-utilities" (OuterVolumeSpecName: "utilities") pod "48520909-a6cd-4ec4-a6db-35a778505823" (UID: "48520909-a6cd-4ec4-a6db-35a778505823"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.298271 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62802da2-70ad-46d2-bc51-b9bf3e0b6086-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "62802da2-70ad-46d2-bc51-b9bf3e0b6086" (UID: "62802da2-70ad-46d2-bc51-b9bf3e0b6086"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.299140 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48520909-a6cd-4ec4-a6db-35a778505823-kube-api-access-vmp9j" (OuterVolumeSpecName: "kube-api-access-vmp9j") pod "48520909-a6cd-4ec4-a6db-35a778505823" (UID: "48520909-a6cd-4ec4-a6db-35a778505823"). InnerVolumeSpecName "kube-api-access-vmp9j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.299386 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46394f6a-9e6f-49f8-a879-1753789c4ba0-kube-api-access-xptp7" (OuterVolumeSpecName: "kube-api-access-xptp7") pod "46394f6a-9e6f-49f8-a879-1753789c4ba0" (UID: "46394f6a-9e6f-49f8-a879-1753789c4ba0"). InnerVolumeSpecName "kube-api-access-xptp7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.301858 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62802da2-70ad-46d2-bc51-b9bf3e0b6086-kube-api-access-sw7z4" (OuterVolumeSpecName: "kube-api-access-sw7z4") pod "62802da2-70ad-46d2-bc51-b9bf3e0b6086" (UID: "62802da2-70ad-46d2-bc51-b9bf3e0b6086"). InnerVolumeSpecName "kube-api-access-sw7z4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.315811 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48520909-a6cd-4ec4-a6db-35a778505823-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "48520909-a6cd-4ec4-a6db-35a778505823" (UID: "48520909-a6cd-4ec4-a6db-35a778505823"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.396645 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48520909-a6cd-4ec4-a6db-35a778505823-utilities\") on node \"crc\" DevicePath \"\""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.396685 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vmp9j\" (UniqueName: \"kubernetes.io/projected/48520909-a6cd-4ec4-a6db-35a778505823-kube-api-access-vmp9j\") on node \"crc\" DevicePath \"\""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.396696 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46394f6a-9e6f-49f8-a879-1753789c4ba0-utilities\") on node \"crc\" DevicePath \"\""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.396705 4956 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/62802da2-70ad-46d2-bc51-b9bf3e0b6086-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.396717 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48520909-a6cd-4ec4-a6db-35a778505823-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.396726 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sw7z4\" (UniqueName: \"kubernetes.io/projected/62802da2-70ad-46d2-bc51-b9bf3e0b6086-kube-api-access-sw7z4\") on node \"crc\" DevicePath \"\""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.396736 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.396744 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xptp7\" (UniqueName: \"kubernetes.io/projected/46394f6a-9e6f-49f8-a879-1753789c4ba0-kube-api-access-xptp7\") on node \"crc\" DevicePath \"\""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.408337 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46394f6a-9e6f-49f8-a879-1753789c4ba0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "46394f6a-9e6f-49f8-a879-1753789c4ba0" (UID: "46394f6a-9e6f-49f8-a879-1753789c4ba0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.458615 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-g7wp2"]
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.498080 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46394f6a-9e6f-49f8-a879-1753789c4ba0-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.539676 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bn9xr"
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.599174 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa022a30-6487-45c0-82b7-336a05167918-catalog-content\") pod \"aa022a30-6487-45c0-82b7-336a05167918\" (UID: \"aa022a30-6487-45c0-82b7-336a05167918\") "
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.599268 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvrgh\" (UniqueName: \"kubernetes.io/projected/aa022a30-6487-45c0-82b7-336a05167918-kube-api-access-jvrgh\") pod \"aa022a30-6487-45c0-82b7-336a05167918\" (UID: \"aa022a30-6487-45c0-82b7-336a05167918\") "
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.599323 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa022a30-6487-45c0-82b7-336a05167918-utilities\") pod \"aa022a30-6487-45c0-82b7-336a05167918\" (UID: \"aa022a30-6487-45c0-82b7-336a05167918\") "
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.602173 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa022a30-6487-45c0-82b7-336a05167918-utilities" (OuterVolumeSpecName: "utilities") pod "aa022a30-6487-45c0-82b7-336a05167918" (UID: "aa022a30-6487-45c0-82b7-336a05167918"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.605410 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa022a30-6487-45c0-82b7-336a05167918-kube-api-access-jvrgh" (OuterVolumeSpecName: "kube-api-access-jvrgh") pod "aa022a30-6487-45c0-82b7-336a05167918" (UID: "aa022a30-6487-45c0-82b7-336a05167918"). InnerVolumeSpecName "kube-api-access-jvrgh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.653263 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa022a30-6487-45c0-82b7-336a05167918-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aa022a30-6487-45c0-82b7-336a05167918" (UID: "aa022a30-6487-45c0-82b7-336a05167918"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.700376 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvrgh\" (UniqueName: \"kubernetes.io/projected/aa022a30-6487-45c0-82b7-336a05167918-kube-api-access-jvrgh\") on node \"crc\" DevicePath \"\""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.700403 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa022a30-6487-45c0-82b7-336a05167918-utilities\") on node \"crc\" DevicePath \"\""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.700412 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa022a30-6487-45c0-82b7-336a05167918-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.926079 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-d8mkh" event={"ID":"62802da2-70ad-46d2-bc51-b9bf3e0b6086","Type":"ContainerDied","Data":"b9a7f7a6fdbcd24da2e91b63d1eb12dedc81ed30a488d1d10f6ab11d87b6a6bb"}
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.926581 4956 scope.go:117] "RemoveContainer" containerID="5a24da353f835950292ac6c9b2d0d0d23f020db6ea2a92e9daf1e643fb3b3f28"
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.926128 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-d8mkh"
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.929359 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-46xqx" event={"ID":"46394f6a-9e6f-49f8-a879-1753789c4ba0","Type":"ContainerDied","Data":"13f934aa01e4aa609acf0210c84257ec30128b421309fc457f766a5d64382bb0"}
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.929488 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-46xqx"
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.933381 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bn9xr" event={"ID":"aa022a30-6487-45c0-82b7-336a05167918","Type":"ContainerDied","Data":"836e4b5b40fe2e4fed5e39901112ad5741ded25c541bf117cde82dbcf6b44348"}
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.933538 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bn9xr"
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.941681 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5khrp" event={"ID":"48520909-a6cd-4ec4-a6db-35a778505823","Type":"ContainerDied","Data":"0c6a88443074267a986f576175d375f029859b56609782ea89d7a77b08fcc47e"}
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.941708 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5khrp"
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.945742 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l5286"
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.946198 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l5286" event={"ID":"3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336","Type":"ContainerDied","Data":"4e62638da06d5c3847b0da986597866cad93e3878130864ec6ee21fdcdda881d"}
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.948030 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-g7wp2" event={"ID":"6a02d05c-9762-43bf-8ab5-2e7a1f7695bc","Type":"ContainerStarted","Data":"f228dad190c06c1e528b93a248325102268c35158bb27892acfa0a262307374d"}
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.948076 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-g7wp2" event={"ID":"6a02d05c-9762-43bf-8ab5-2e7a1f7695bc","Type":"ContainerStarted","Data":"36c732d7303b775dfa81b6e0f4e0fdc66e1fbbdaae001fd70d8311de3d6730ae"}
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.948743 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-g7wp2"
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.950054 4956 scope.go:117] "RemoveContainer" containerID="99678f6232902c257785529f2af5870913f4f166fde8ad1851a1faa18bc17b13"
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.954056 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-g7wp2"
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.967592 4956 scope.go:117] "RemoveContainer" containerID="1ee7042c64ca84873bb5bdffc706449bb401c9212ef9926bf6eb8a1e6210b1e6"
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.968254 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-d8mkh"]
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.985866 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-g7wp2" podStartSLOduration=1.985845563 podStartE2EDuration="1.985845563s" podCreationTimestamp="2025-12-11 21:53:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 21:53:24.974813011 +0000 UTC m=+297.419191161" watchObservedRunningTime="2025-12-11 21:53:24.985845563 +0000 UTC m=+297.430223713"
Dec 11 21:53:24 crc kubenswrapper[4956]: I1211 21:53:24.990030 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-d8mkh"]
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.007079 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-46xqx"]
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.013688 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-46xqx"]
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.020500 4956 scope.go:117] "RemoveContainer" containerID="2e3efed30cd9ddd2f1bbafecf0c524a1268e95089b9b3a7eef27c65de5d6024b"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.021337 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bn9xr"]
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.025488 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-bn9xr"]
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.039978 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5khrp"]
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.040018 4956 scope.go:117] "RemoveContainer" containerID="5f5b3e7a1aeebd57d07d9847751f28ecaf9a69ffe563c58100158050e3ec5f74"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.042944 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5khrp"]
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.045946 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l5286"]
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.050285 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-l5286"]
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.055438 4956 scope.go:117] "RemoveContainer" containerID="e5b084efce5e1c164208abf5b6fe5b0103b19bbe4e1343c802aeeece29884836"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.074504 4956 scope.go:117] "RemoveContainer" containerID="331c3c10550457239ac2e38348bf93dab6b136c5c37453343687ebc567560af5"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.087593 4956 scope.go:117] "RemoveContainer" containerID="4716eb3ec77e80bbc9b606ab179098b203d857090cc60852eedd5d42d7ca493d"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.100690 4956 scope.go:117] "RemoveContainer" containerID="1cb114b799a9b35da1d1cc8c6141c02fcf639b3a46a52fc220e34558c6f4656e"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.111361 4956 scope.go:117] "RemoveContainer" containerID="117ae9c7ee44b3d6411f31f57c8305711d7cc4cc394892ee174307b7c5bde141"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.121064 4956 scope.go:117] "RemoveContainer" containerID="c3fed00e13fa0add010ec9560c6db643ca46e509c24b073f0fc87f79ed7cc9ef"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.133298 4956 scope.go:117] "RemoveContainer" containerID="c599b95c1217e839f0a604c544e7e0ca3c5834eb49d5773d41908988dd824e0c"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.147441 4956 scope.go:117] "RemoveContainer" containerID="28a2de006491ea97b6fe4e5988d1c16a3ce0116eccc30640e3bdc8631d78154d"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.818087 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9m6q7"]
Dec 11 21:53:25 crc kubenswrapper[4956]: E1211 21:53:25.818616 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa022a30-6487-45c0-82b7-336a05167918" containerName="registry-server"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.818632 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa022a30-6487-45c0-82b7-336a05167918" containerName="registry-server"
Dec 11 21:53:25 crc kubenswrapper[4956]: E1211 21:53:25.818646 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336" containerName="registry-server"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.818654 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336" containerName="registry-server"
Dec 11 21:53:25 crc kubenswrapper[4956]: E1211 21:53:25.818664 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48520909-a6cd-4ec4-a6db-35a778505823" containerName="extract-utilities"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.818673 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="48520909-a6cd-4ec4-a6db-35a778505823" containerName="extract-utilities"
Dec 11 21:53:25 crc kubenswrapper[4956]: E1211 21:53:25.818680 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa022a30-6487-45c0-82b7-336a05167918" containerName="extract-content"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.818688 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa022a30-6487-45c0-82b7-336a05167918" containerName="extract-content"
Dec 11 21:53:25 crc kubenswrapper[4956]: E1211 21:53:25.818697 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336" containerName="extract-content"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.818704 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336" containerName="extract-content"
Dec 11 21:53:25 crc kubenswrapper[4956]: E1211 21:53:25.818712 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa022a30-6487-45c0-82b7-336a05167918" containerName="extract-utilities"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.818719 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa022a30-6487-45c0-82b7-336a05167918" containerName="extract-utilities"
Dec 11 21:53:25 crc kubenswrapper[4956]: E1211 21:53:25.818730 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48520909-a6cd-4ec4-a6db-35a778505823" containerName="registry-server"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.818738 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="48520909-a6cd-4ec4-a6db-35a778505823" containerName="registry-server"
Dec 11 21:53:25 crc kubenswrapper[4956]: E1211 21:53:25.818748 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62802da2-70ad-46d2-bc51-b9bf3e0b6086" containerName="marketplace-operator"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.818755 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="62802da2-70ad-46d2-bc51-b9bf3e0b6086" containerName="marketplace-operator"
Dec 11 21:53:25 crc kubenswrapper[4956]: E1211 21:53:25.818787 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48520909-a6cd-4ec4-a6db-35a778505823" containerName="extract-content"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.818797 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="48520909-a6cd-4ec4-a6db-35a778505823" containerName="extract-content"
Dec 11 21:53:25 crc kubenswrapper[4956]: E1211 21:53:25.818809 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336" containerName="extract-utilities"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.818818 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336" containerName="extract-utilities"
Dec 11 21:53:25 crc kubenswrapper[4956]: E1211 21:53:25.818834 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46394f6a-9e6f-49f8-a879-1753789c4ba0" containerName="extract-content"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.818843 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="46394f6a-9e6f-49f8-a879-1753789c4ba0" containerName="extract-content"
Dec 11 21:53:25 crc kubenswrapper[4956]: E1211 21:53:25.818852 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46394f6a-9e6f-49f8-a879-1753789c4ba0" containerName="registry-server"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.818859 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="46394f6a-9e6f-49f8-a879-1753789c4ba0" containerName="registry-server"
Dec 11 21:53:25 crc kubenswrapper[4956]: E1211 21:53:25.818871 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46394f6a-9e6f-49f8-a879-1753789c4ba0" containerName="extract-utilities"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.818878 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="46394f6a-9e6f-49f8-a879-1753789c4ba0" containerName="extract-utilities"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.818986 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa022a30-6487-45c0-82b7-336a05167918" containerName="registry-server"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.819000 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="48520909-a6cd-4ec4-a6db-35a778505823" containerName="registry-server"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.819014 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="46394f6a-9e6f-49f8-a879-1753789c4ba0" containerName="registry-server"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.819022 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="62802da2-70ad-46d2-bc51-b9bf3e0b6086" containerName="marketplace-operator"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.819030 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336" containerName="registry-server"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.819945 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9m6q7"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.822190 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Dec 11 21:53:25 crc kubenswrapper[4956]: I1211 21:53:25.830874 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9m6q7"]
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.015029 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5287560-f940-4626-9cb6-1b0a16a25a1b-catalog-content\") pod \"certified-operators-9m6q7\" (UID: \"d5287560-f940-4626-9cb6-1b0a16a25a1b\") " pod="openshift-marketplace/certified-operators-9m6q7"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.015114 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-979ht\" (UniqueName: \"kubernetes.io/projected/d5287560-f940-4626-9cb6-1b0a16a25a1b-kube-api-access-979ht\") pod \"certified-operators-9m6q7\" (UID: \"d5287560-f940-4626-9cb6-1b0a16a25a1b\") " pod="openshift-marketplace/certified-operators-9m6q7"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.015149 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5287560-f940-4626-9cb6-1b0a16a25a1b-utilities\") pod \"certified-operators-9m6q7\" (UID: \"d5287560-f940-4626-9cb6-1b0a16a25a1b\") " pod="openshift-marketplace/certified-operators-9m6q7"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.015966 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fzj5l"]
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.017265 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fzj5l"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.029007 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.033632 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336" path="/var/lib/kubelet/pods/3d6e0f4b-bd86-4445-87c3-0c9b7cc4e336/volumes"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.034749 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46394f6a-9e6f-49f8-a879-1753789c4ba0" path="/var/lib/kubelet/pods/46394f6a-9e6f-49f8-a879-1753789c4ba0/volumes"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.035590 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48520909-a6cd-4ec4-a6db-35a778505823" path="/var/lib/kubelet/pods/48520909-a6cd-4ec4-a6db-35a778505823/volumes"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.038041 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62802da2-70ad-46d2-bc51-b9bf3e0b6086" path="/var/lib/kubelet/pods/62802da2-70ad-46d2-bc51-b9bf3e0b6086/volumes"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.038620 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa022a30-6487-45c0-82b7-336a05167918" path="/var/lib/kubelet/pods/aa022a30-6487-45c0-82b7-336a05167918/volumes"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.039327 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fzj5l"]
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.116657 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5287560-f940-4626-9cb6-1b0a16a25a1b-catalog-content\") pod \"certified-operators-9m6q7\" (UID: \"d5287560-f940-4626-9cb6-1b0a16a25a1b\") " pod="openshift-marketplace/certified-operators-9m6q7"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.116710 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-979ht\" (UniqueName: \"kubernetes.io/projected/d5287560-f940-4626-9cb6-1b0a16a25a1b-kube-api-access-979ht\") pod \"certified-operators-9m6q7\" (UID: \"d5287560-f940-4626-9cb6-1b0a16a25a1b\") " pod="openshift-marketplace/certified-operators-9m6q7"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.116901 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5287560-f940-4626-9cb6-1b0a16a25a1b-utilities\") pod \"certified-operators-9m6q7\" (UID: \"d5287560-f940-4626-9cb6-1b0a16a25a1b\") " pod="openshift-marketplace/certified-operators-9m6q7"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.117709 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d5287560-f940-4626-9cb6-1b0a16a25a1b-catalog-content\") pod \"certified-operators-9m6q7\" (UID: \"d5287560-f940-4626-9cb6-1b0a16a25a1b\") " pod="openshift-marketplace/certified-operators-9m6q7"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.117852 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d5287560-f940-4626-9cb6-1b0a16a25a1b-utilities\") pod \"certified-operators-9m6q7\" (UID: \"d5287560-f940-4626-9cb6-1b0a16a25a1b\") " pod="openshift-marketplace/certified-operators-9m6q7"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.135141 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-979ht\" (UniqueName: \"kubernetes.io/projected/d5287560-f940-4626-9cb6-1b0a16a25a1b-kube-api-access-979ht\") pod \"certified-operators-9m6q7\" (UID: \"d5287560-f940-4626-9cb6-1b0a16a25a1b\") " pod="openshift-marketplace/certified-operators-9m6q7"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.151677 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9m6q7"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.219193 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/815f1988-351e-416f-b414-3ed53388a8ae-catalog-content\") pod \"redhat-marketplace-fzj5l\" (UID: \"815f1988-351e-416f-b414-3ed53388a8ae\") " pod="openshift-marketplace/redhat-marketplace-fzj5l"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.219261 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnxh7\" (UniqueName: \"kubernetes.io/projected/815f1988-351e-416f-b414-3ed53388a8ae-kube-api-access-pnxh7\") pod \"redhat-marketplace-fzj5l\" (UID: \"815f1988-351e-416f-b414-3ed53388a8ae\") " pod="openshift-marketplace/redhat-marketplace-fzj5l"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.219301 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/815f1988-351e-416f-b414-3ed53388a8ae-utilities\") pod \"redhat-marketplace-fzj5l\" (UID: \"815f1988-351e-416f-b414-3ed53388a8ae\") " pod="openshift-marketplace/redhat-marketplace-fzj5l"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.320220 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/815f1988-351e-416f-b414-3ed53388a8ae-catalog-content\") pod \"redhat-marketplace-fzj5l\" (UID: \"815f1988-351e-416f-b414-3ed53388a8ae\") " pod="openshift-marketplace/redhat-marketplace-fzj5l"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.320637 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnxh7\" (UniqueName: \"kubernetes.io/projected/815f1988-351e-416f-b414-3ed53388a8ae-kube-api-access-pnxh7\") pod \"redhat-marketplace-fzj5l\" (UID: \"815f1988-351e-416f-b414-3ed53388a8ae\") " pod="openshift-marketplace/redhat-marketplace-fzj5l"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.320706 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/815f1988-351e-416f-b414-3ed53388a8ae-utilities\") pod \"redhat-marketplace-fzj5l\" (UID: \"815f1988-351e-416f-b414-3ed53388a8ae\") " pod="openshift-marketplace/redhat-marketplace-fzj5l"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.320754 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/815f1988-351e-416f-b414-3ed53388a8ae-catalog-content\") pod \"redhat-marketplace-fzj5l\" (UID: \"815f1988-351e-416f-b414-3ed53388a8ae\") " pod="openshift-marketplace/redhat-marketplace-fzj5l"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.321385 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/815f1988-351e-416f-b414-3ed53388a8ae-utilities\") pod \"redhat-marketplace-fzj5l\" (UID: \"815f1988-351e-416f-b414-3ed53388a8ae\") " pod="openshift-marketplace/redhat-marketplace-fzj5l"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.337246 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnxh7\" (UniqueName: \"kubernetes.io/projected/815f1988-351e-416f-b414-3ed53388a8ae-kube-api-access-pnxh7\") pod \"redhat-marketplace-fzj5l\" (UID: \"815f1988-351e-416f-b414-3ed53388a8ae\") " pod="openshift-marketplace/redhat-marketplace-fzj5l"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.365118 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9m6q7"]
Dec 11 21:53:26 crc kubenswrapper[4956]: W1211 21:53:26.366219 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd5287560_f940_4626_9cb6_1b0a16a25a1b.slice/crio-af8b2d9f34e05678812afb69202a0014092599fac06afcdef598aa615b08697e WatchSource:0}: Error finding container af8b2d9f34e05678812afb69202a0014092599fac06afcdef598aa615b08697e: Status 404 returned error can't find the container with id af8b2d9f34e05678812afb69202a0014092599fac06afcdef598aa615b08697e
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.635323 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fzj5l"
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.966735 4956 generic.go:334] "Generic (PLEG): container finished" podID="d5287560-f940-4626-9cb6-1b0a16a25a1b" containerID="ddf0a082d79554a5a422ce32f46b85401cd0376c5c24e194360470599178d992" exitCode=0
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.966801 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9m6q7" event={"ID":"d5287560-f940-4626-9cb6-1b0a16a25a1b","Type":"ContainerDied","Data":"ddf0a082d79554a5a422ce32f46b85401cd0376c5c24e194360470599178d992"}
Dec 11 21:53:26 crc kubenswrapper[4956]: I1211 21:53:26.967107 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9m6q7" event={"ID":"d5287560-f940-4626-9cb6-1b0a16a25a1b","Type":"ContainerStarted","Data":"af8b2d9f34e05678812afb69202a0014092599fac06afcdef598aa615b08697e"}
Dec 11 21:53:27 crc kubenswrapper[4956]: I1211 21:53:27.029021 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fzj5l"]
Dec 11 21:53:27 crc kubenswrapper[4956]: I1211 21:53:27.877534 4956 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials
Dec 11 21:53:27 crc kubenswrapper[4956]: I1211 21:53:27.974803 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9m6q7" event={"ID":"d5287560-f940-4626-9cb6-1b0a16a25a1b","Type":"ContainerStarted","Data":"987d71e80443f5fca2b7997c1da1348bd59b0882d67a7ce520e9f3df7371cff6"}
Dec 11 21:53:27 crc kubenswrapper[4956]: I1211 21:53:27.976445 4956 generic.go:334] "Generic (PLEG): container finished" podID="815f1988-351e-416f-b414-3ed53388a8ae" containerID="48360ba03fdf0f0f66c5c431a364a37705a970e388f6618d2272cd7b834975df" exitCode=0
Dec 11 21:53:27 crc kubenswrapper[4956]: I1211 21:53:27.976485 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fzj5l" event={"ID":"815f1988-351e-416f-b414-3ed53388a8ae","Type":"ContainerDied","Data":"48360ba03fdf0f0f66c5c431a364a37705a970e388f6618d2272cd7b834975df"}
Dec 11 21:53:27 crc kubenswrapper[4956]: I1211 21:53:27.976508 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fzj5l" event={"ID":"815f1988-351e-416f-b414-3ed53388a8ae","Type":"ContainerStarted","Data":"54708f6f5935f07c53c7b3c119ec33da4df5a3f0828f1c5ede4c5efc556e5254"}
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.222133 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7cd6d"]
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.235012 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7cd6d"]
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.235232 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7cd6d"
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.238245 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.251638 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c6c2a49-1562-4731-83ac-680213b5830f-utilities\") pod \"redhat-operators-7cd6d\" (UID: \"3c6c2a49-1562-4731-83ac-680213b5830f\") " pod="openshift-marketplace/redhat-operators-7cd6d"
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.251680 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrzw7\" (UniqueName: \"kubernetes.io/projected/3c6c2a49-1562-4731-83ac-680213b5830f-kube-api-access-jrzw7\") pod \"redhat-operators-7cd6d\" (UID: \"3c6c2a49-1562-4731-83ac-680213b5830f\") " pod="openshift-marketplace/redhat-operators-7cd6d"
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.251713 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c6c2a49-1562-4731-83ac-680213b5830f-catalog-content\") pod \"redhat-operators-7cd6d\" (UID: \"3c6c2a49-1562-4731-83ac-680213b5830f\") " pod="openshift-marketplace/redhat-operators-7cd6d"
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.352445 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c6c2a49-1562-4731-83ac-680213b5830f-catalog-content\") pod \"redhat-operators-7cd6d\" (UID: \"3c6c2a49-1562-4731-83ac-680213b5830f\") " pod="openshift-marketplace/redhat-operators-7cd6d"
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.352531 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c6c2a49-1562-4731-83ac-680213b5830f-utilities\") pod \"redhat-operators-7cd6d\" (UID: \"3c6c2a49-1562-4731-83ac-680213b5830f\") " pod="openshift-marketplace/redhat-operators-7cd6d"
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.352552 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrzw7\" (UniqueName: \"kubernetes.io/projected/3c6c2a49-1562-4731-83ac-680213b5830f-kube-api-access-jrzw7\") pod \"redhat-operators-7cd6d\" (UID: \"3c6c2a49-1562-4731-83ac-680213b5830f\") " pod="openshift-marketplace/redhat-operators-7cd6d"
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.353058 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c6c2a49-1562-4731-83ac-680213b5830f-catalog-content\") pod \"redhat-operators-7cd6d\" (UID: \"3c6c2a49-1562-4731-83ac-680213b5830f\") " pod="openshift-marketplace/redhat-operators-7cd6d"
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.353161 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c6c2a49-1562-4731-83ac-680213b5830f-utilities\") pod \"redhat-operators-7cd6d\" (UID: \"3c6c2a49-1562-4731-83ac-680213b5830f\") " pod="openshift-marketplace/redhat-operators-7cd6d"
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.372402 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrzw7\" (UniqueName: \"kubernetes.io/projected/3c6c2a49-1562-4731-83ac-680213b5830f-kube-api-access-jrzw7\") pod \"redhat-operators-7cd6d\" (UID: \"3c6c2a49-1562-4731-83ac-680213b5830f\") " pod="openshift-marketplace/redhat-operators-7cd6d"
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.417332 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zlq72"]
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.419154 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zlq72"
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.421846 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.430834 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zlq72"]
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.552323 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7cd6d"
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.555143 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d029043f-7a0a-43fd-a899-ee79724ca7ac-utilities\") pod \"community-operators-zlq72\" (UID: \"d029043f-7a0a-43fd-a899-ee79724ca7ac\") " pod="openshift-marketplace/community-operators-zlq72"
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.555219 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67t68\" (UniqueName: \"kubernetes.io/projected/d029043f-7a0a-43fd-a899-ee79724ca7ac-kube-api-access-67t68\") pod \"community-operators-zlq72\" (UID: \"d029043f-7a0a-43fd-a899-ee79724ca7ac\") " pod="openshift-marketplace/community-operators-zlq72"
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.555308 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d029043f-7a0a-43fd-a899-ee79724ca7ac-catalog-content\") pod \"community-operators-zlq72\" (UID: \"d029043f-7a0a-43fd-a899-ee79724ca7ac\") " pod="openshift-marketplace/community-operators-zlq72"
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.656895 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67t68\" (UniqueName: \"kubernetes.io/projected/d029043f-7a0a-43fd-a899-ee79724ca7ac-kube-api-access-67t68\") pod \"community-operators-zlq72\" (UID: \"d029043f-7a0a-43fd-a899-ee79724ca7ac\") " pod="openshift-marketplace/community-operators-zlq72"
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.656990 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d029043f-7a0a-43fd-a899-ee79724ca7ac-catalog-content\") pod \"community-operators-zlq72\" (UID: \"d029043f-7a0a-43fd-a899-ee79724ca7ac\") " pod="openshift-marketplace/community-operators-zlq72"
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.657013 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d029043f-7a0a-43fd-a899-ee79724ca7ac-utilities\") pod \"community-operators-zlq72\" (UID: \"d029043f-7a0a-43fd-a899-ee79724ca7ac\") " pod="openshift-marketplace/community-operators-zlq72"
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.657709 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d029043f-7a0a-43fd-a899-ee79724ca7ac-utilities\") pod \"community-operators-zlq72\" (UID: \"d029043f-7a0a-43fd-a899-ee79724ca7ac\") " pod="openshift-marketplace/community-operators-zlq72"
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.657897 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d029043f-7a0a-43fd-a899-ee79724ca7ac-catalog-content\") pod \"community-operators-zlq72\" (UID: \"d029043f-7a0a-43fd-a899-ee79724ca7ac\") " pod="openshift-marketplace/community-operators-zlq72"
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.694228 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67t68\" (UniqueName: \"kubernetes.io/projected/d029043f-7a0a-43fd-a899-ee79724ca7ac-kube-api-access-67t68\") pod \"community-operators-zlq72\" (UID: \"d029043f-7a0a-43fd-a899-ee79724ca7ac\") " pod="openshift-marketplace/community-operators-zlq72"
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.752841 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zlq72"
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.935605 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7cd6d"]
Dec 11 21:53:28 crc kubenswrapper[4956]: W1211 21:53:28.940241 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3c6c2a49_1562_4731_83ac_680213b5830f.slice/crio-d359116be2ee520cb8a00b61e0706c651fa792efe0819ee822993537d352e1dd WatchSource:0}: Error finding container d359116be2ee520cb8a00b61e0706c651fa792efe0819ee822993537d352e1dd: Status 404 returned error can't find the container with id d359116be2ee520cb8a00b61e0706c651fa792efe0819ee822993537d352e1dd
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.996361 4956 generic.go:334] "Generic (PLEG): container finished" podID="d5287560-f940-4626-9cb6-1b0a16a25a1b" containerID="987d71e80443f5fca2b7997c1da1348bd59b0882d67a7ce520e9f3df7371cff6" exitCode=0
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.997535 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9m6q7" event={"ID":"d5287560-f940-4626-9cb6-1b0a16a25a1b","Type":"ContainerDied","Data":"987d71e80443f5fca2b7997c1da1348bd59b0882d67a7ce520e9f3df7371cff6"}
Dec 11 21:53:28 crc kubenswrapper[4956]: I1211 21:53:28.998660 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7cd6d" event={"ID":"3c6c2a49-1562-4731-83ac-680213b5830f","Type":"ContainerStarted","Data":"d359116be2ee520cb8a00b61e0706c651fa792efe0819ee822993537d352e1dd"}
Dec 11 21:53:29 crc kubenswrapper[4956]: I1211 21:53:29.136592 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zlq72"]
Dec 11 21:53:29 crc kubenswrapper[4956]: W1211 21:53:29.147419 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd029043f_7a0a_43fd_a899_ee79724ca7ac.slice/crio-dcf2800e9ab95e3ddd6a64f57f8bd0ef1e361cd8c128316d35f1022711e5c835 WatchSource:0}: Error finding container dcf2800e9ab95e3ddd6a64f57f8bd0ef1e361cd8c128316d35f1022711e5c835: Status 404 returned error can't find the container with id dcf2800e9ab95e3ddd6a64f57f8bd0ef1e361cd8c128316d35f1022711e5c835
Dec 11 21:53:30 crc kubenswrapper[4956]: I1211 21:53:30.005571 4956 generic.go:334] "Generic (PLEG): container finished" podID="815f1988-351e-416f-b414-3ed53388a8ae" containerID="6ea9ff6ae1391d04970683b6e8d3c20e87417933d9d5bf126452d6ebc7ec9d02" exitCode=0
Dec 11 21:53:30 crc kubenswrapper[4956]: I1211 21:53:30.005668 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fzj5l" event={"ID":"815f1988-351e-416f-b414-3ed53388a8ae","Type":"ContainerDied","Data":"6ea9ff6ae1391d04970683b6e8d3c20e87417933d9d5bf126452d6ebc7ec9d02"}
Dec 11 21:53:30 crc kubenswrapper[4956]: I1211 21:53:30.008309 4956 generic.go:334] "Generic (PLEG): container finished" podID="3c6c2a49-1562-4731-83ac-680213b5830f" containerID="316d2d0e6882e6a12da5d4873131a99e1391c5d6e71c3d02675a8b89a637fefe" exitCode=0
Dec 11 21:53:30 crc kubenswrapper[4956]: I1211 21:53:30.008429 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7cd6d" event={"ID":"3c6c2a49-1562-4731-83ac-680213b5830f","Type":"ContainerDied","Data":"316d2d0e6882e6a12da5d4873131a99e1391c5d6e71c3d02675a8b89a637fefe"}
Dec 11 21:53:30 crc kubenswrapper[4956]: I1211 21:53:30.011129 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9m6q7" event={"ID":"d5287560-f940-4626-9cb6-1b0a16a25a1b","Type":"ContainerStarted","Data":"b0809c415d09949f0bc0963c7f65a15db5ec134a2360bb4d2084b924ccb3e89a"}
Dec 11 21:53:30 crc kubenswrapper[4956]: I1211 21:53:30.013439 4956 generic.go:334] "Generic (PLEG): container finished" podID="d029043f-7a0a-43fd-a899-ee79724ca7ac" containerID="3b9de7e439a1a2b3d1d1779dfab117bcb22a6777c03132d62d70f29fd44c7b70" exitCode=0
Dec 11 21:53:30 crc kubenswrapper[4956]: I1211 21:53:30.013600 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zlq72" event={"ID":"d029043f-7a0a-43fd-a899-ee79724ca7ac","Type":"ContainerDied","Data":"3b9de7e439a1a2b3d1d1779dfab117bcb22a6777c03132d62d70f29fd44c7b70"}
Dec 11 21:53:30 crc kubenswrapper[4956]: I1211 21:53:30.013650 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zlq72" event={"ID":"d029043f-7a0a-43fd-a899-ee79724ca7ac","Type":"ContainerStarted","Data":"dcf2800e9ab95e3ddd6a64f57f8bd0ef1e361cd8c128316d35f1022711e5c835"}
Dec 11 21:53:30 crc kubenswrapper[4956]: I1211 21:53:30.085727 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9m6q7" podStartSLOduration=2.6328748109999998 podStartE2EDuration="5.085710347s" podCreationTimestamp="2025-12-11 21:53:25 +0000 UTC" firstStartedPulling="2025-12-11 21:53:26.969374328 +0000 UTC m=+299.413752478" lastFinishedPulling="2025-12-11 21:53:29.422209864 +0000 UTC m=+301.866588014" observedRunningTime="2025-12-11 21:53:30.084753339 +0000 UTC m=+302.529131499" watchObservedRunningTime="2025-12-11 21:53:30.085710347 +0000 UTC m=+302.530088487"
Dec 11 21:53:31 crc kubenswrapper[4956]: I1211 21:53:31.036790 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zlq72" event={"ID":"d029043f-7a0a-43fd-a899-ee79724ca7ac","Type":"ContainerStarted","Data":"8e0055832a5d3b63d5bb79504a83aad38e83761e2bf0ccba4594ba1548c82e32"}
Dec 11 21:53:31 crc kubenswrapper[4956]: I1211 21:53:31.038896 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fzj5l" event={"ID":"815f1988-351e-416f-b414-3ed53388a8ae","Type":"ContainerStarted","Data":"9a890fbdb372d018b6e38f072610f4018ab24968feb967b0c946ac23e6cf42aa"}
Dec 11 21:53:31 crc kubenswrapper[4956]: I1211 21:53:31.041673 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7cd6d" event={"ID":"3c6c2a49-1562-4731-83ac-680213b5830f","Type":"ContainerStarted","Data":"b8c6f0f19a355085a3d39e8afca2c3368580bb8a21961ec28010aa25191ad02a"}
Dec 11 21:53:31 crc kubenswrapper[4956]: I1211 21:53:31.082032 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fzj5l" podStartSLOduration=2.407551865 podStartE2EDuration="5.082011662s" podCreationTimestamp="2025-12-11 21:53:26 +0000 UTC" firstStartedPulling="2025-12-11 21:53:27.977660902 +0000 UTC m=+300.422039062" lastFinishedPulling="2025-12-11 21:53:30.652120709 +0000 UTC m=+303.096498859"
observedRunningTime="2025-12-11 21:53:31.077759998 +0000 UTC m=+303.522138168" watchObservedRunningTime="2025-12-11 21:53:31.082011662 +0000 UTC m=+303.526389812" Dec 11 21:53:32 crc kubenswrapper[4956]: I1211 21:53:32.047821 4956 generic.go:334] "Generic (PLEG): container finished" podID="3c6c2a49-1562-4731-83ac-680213b5830f" containerID="b8c6f0f19a355085a3d39e8afca2c3368580bb8a21961ec28010aa25191ad02a" exitCode=0 Dec 11 21:53:32 crc kubenswrapper[4956]: I1211 21:53:32.047869 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7cd6d" event={"ID":"3c6c2a49-1562-4731-83ac-680213b5830f","Type":"ContainerDied","Data":"b8c6f0f19a355085a3d39e8afca2c3368580bb8a21961ec28010aa25191ad02a"} Dec 11 21:53:32 crc kubenswrapper[4956]: I1211 21:53:32.051351 4956 generic.go:334] "Generic (PLEG): container finished" podID="d029043f-7a0a-43fd-a899-ee79724ca7ac" containerID="8e0055832a5d3b63d5bb79504a83aad38e83761e2bf0ccba4594ba1548c82e32" exitCode=0 Dec 11 21:53:32 crc kubenswrapper[4956]: I1211 21:53:32.051416 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zlq72" event={"ID":"d029043f-7a0a-43fd-a899-ee79724ca7ac","Type":"ContainerDied","Data":"8e0055832a5d3b63d5bb79504a83aad38e83761e2bf0ccba4594ba1548c82e32"} Dec 11 21:53:33 crc kubenswrapper[4956]: I1211 21:53:33.062299 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7cd6d" event={"ID":"3c6c2a49-1562-4731-83ac-680213b5830f","Type":"ContainerStarted","Data":"d7dc16fdebfe4df399e279f7021f1e6f1b6bbab8ef81761c38dcb43226b1477d"} Dec 11 21:53:33 crc kubenswrapper[4956]: I1211 21:53:33.064136 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zlq72" event={"ID":"d029043f-7a0a-43fd-a899-ee79724ca7ac","Type":"ContainerStarted","Data":"c1b145833f00d3d954aeda0bd3d5b37eedec5ea96bb2f7b242c35c4329a0eb38"} Dec 11 21:53:33 crc kubenswrapper[4956]: I1211 21:53:33.085653 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7cd6d" podStartSLOduration=2.454766426 podStartE2EDuration="5.085632513s" podCreationTimestamp="2025-12-11 21:53:28 +0000 UTC" firstStartedPulling="2025-12-11 21:53:30.009538226 +0000 UTC m=+302.453916376" lastFinishedPulling="2025-12-11 21:53:32.640404313 +0000 UTC m=+305.084782463" observedRunningTime="2025-12-11 21:53:33.081539683 +0000 UTC m=+305.525917843" watchObservedRunningTime="2025-12-11 21:53:33.085632513 +0000 UTC m=+305.530010663" Dec 11 21:53:33 crc kubenswrapper[4956]: I1211 21:53:33.104138 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zlq72" podStartSLOduration=2.56469192 podStartE2EDuration="5.104117461s" podCreationTimestamp="2025-12-11 21:53:28 +0000 UTC" firstStartedPulling="2025-12-11 21:53:30.0144792 +0000 UTC m=+302.458857350" lastFinishedPulling="2025-12-11 21:53:32.553904701 +0000 UTC m=+304.998282891" observedRunningTime="2025-12-11 21:53:33.099932679 +0000 UTC m=+305.544310829" watchObservedRunningTime="2025-12-11 21:53:33.104117461 +0000 UTC m=+305.548495611" Dec 11 21:53:34 crc kubenswrapper[4956]: I1211 21:53:34.252506 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-xmzw8" Dec 11 21:53:34 crc kubenswrapper[4956]: I1211 21:53:34.309742 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-image-registry/image-registry-697d97f7c8-hnrtk"] Dec 11 21:53:36 crc kubenswrapper[4956]: I1211 21:53:36.152615 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9m6q7" Dec 11 21:53:36 crc kubenswrapper[4956]: I1211 21:53:36.153760 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9m6q7" Dec 11 21:53:36 crc kubenswrapper[4956]: I1211 21:53:36.190254 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9m6q7" Dec 11 21:53:36 crc kubenswrapper[4956]: I1211 21:53:36.636115 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fzj5l" Dec 11 21:53:36 crc kubenswrapper[4956]: I1211 21:53:36.636303 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fzj5l" Dec 11 21:53:36 crc kubenswrapper[4956]: I1211 21:53:36.677248 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fzj5l" Dec 11 21:53:37 crc kubenswrapper[4956]: I1211 21:53:37.129002 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fzj5l" Dec 11 21:53:37 crc kubenswrapper[4956]: I1211 21:53:37.132370 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9m6q7" Dec 11 21:53:38 crc kubenswrapper[4956]: I1211 21:53:38.553142 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7cd6d" Dec 11 21:53:38 crc kubenswrapper[4956]: I1211 21:53:38.553540 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7cd6d" Dec 11 21:53:38 crc kubenswrapper[4956]: I1211 21:53:38.588835 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7cd6d" Dec 11 21:53:38 crc kubenswrapper[4956]: I1211 21:53:38.753500 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zlq72" Dec 11 21:53:38 crc kubenswrapper[4956]: I1211 21:53:38.753545 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zlq72" Dec 11 21:53:38 crc kubenswrapper[4956]: I1211 21:53:38.790214 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zlq72" Dec 11 21:53:39 crc kubenswrapper[4956]: I1211 21:53:39.138895 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zlq72" Dec 11 21:53:39 crc kubenswrapper[4956]: I1211 21:53:39.141065 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7cd6d" Dec 11 21:53:59 crc kubenswrapper[4956]: I1211 21:53:59.359817 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" podUID="6bcc8182-6e42-4b00-a247-803f1b9bd1d3" containerName="registry" containerID="cri-o://d75c401adfba87a7daf86cd15a1619be7e4b04807ad00fb6c9a45007c5e179e8" gracePeriod=30 Dec 11 21:53:59 crc kubenswrapper[4956]: I1211 21:53:59.985122 4956 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.104311 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b54m7\" (UniqueName: \"kubernetes.io/projected/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-kube-api-access-b54m7\") pod \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.104378 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-bound-sa-token\") pod \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.104411 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-registry-tls\") pod \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.105182 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-trusted-ca\") pod \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.105266 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-registry-certificates\") pod \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.105470 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.105515 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-ca-trust-extracted\") pod \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.105552 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-installation-pull-secrets\") pod \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\" (UID: \"6bcc8182-6e42-4b00-a247-803f1b9bd1d3\") " Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.108836 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "6bcc8182-6e42-4b00-a247-803f1b9bd1d3" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.116580 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "6bcc8182-6e42-4b00-a247-803f1b9bd1d3" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.117657 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "6bcc8182-6e42-4b00-a247-803f1b9bd1d3" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.121123 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "6bcc8182-6e42-4b00-a247-803f1b9bd1d3" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.125705 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "6bcc8182-6e42-4b00-a247-803f1b9bd1d3" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.126487 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-kube-api-access-b54m7" (OuterVolumeSpecName: "kube-api-access-b54m7") pod "6bcc8182-6e42-4b00-a247-803f1b9bd1d3" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3"). InnerVolumeSpecName "kube-api-access-b54m7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.131123 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "6bcc8182-6e42-4b00-a247-803f1b9bd1d3" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.135911 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "6bcc8182-6e42-4b00-a247-803f1b9bd1d3" (UID: "6bcc8182-6e42-4b00-a247-803f1b9bd1d3"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.206563 4956 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.206606 4956 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.206620 4956 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.206633 4956 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.206645 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b54m7\" (UniqueName: \"kubernetes.io/projected/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-kube-api-access-b54m7\") on node \"crc\" DevicePath \"\"" Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.206656 4956 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.206667 4956 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/6bcc8182-6e42-4b00-a247-803f1b9bd1d3-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.211727 4956 generic.go:334] "Generic (PLEG): container finished" podID="6bcc8182-6e42-4b00-a247-803f1b9bd1d3" containerID="d75c401adfba87a7daf86cd15a1619be7e4b04807ad00fb6c9a45007c5e179e8" exitCode=0 Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.211759 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" event={"ID":"6bcc8182-6e42-4b00-a247-803f1b9bd1d3","Type":"ContainerDied","Data":"d75c401adfba87a7daf86cd15a1619be7e4b04807ad00fb6c9a45007c5e179e8"} Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.211801 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" event={"ID":"6bcc8182-6e42-4b00-a247-803f1b9bd1d3","Type":"ContainerDied","Data":"d2e8266f2d17334e3c52ad78995eb56b318f07dc15403ae94e34ee9bdf6c8ed8"} Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.211834 4956 scope.go:117] "RemoveContainer" containerID="d75c401adfba87a7daf86cd15a1619be7e4b04807ad00fb6c9a45007c5e179e8" Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.211849 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-hnrtk" Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.235191 4956 scope.go:117] "RemoveContainer" containerID="d75c401adfba87a7daf86cd15a1619be7e4b04807ad00fb6c9a45007c5e179e8" Dec 11 21:54:00 crc kubenswrapper[4956]: E1211 21:54:00.235726 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d75c401adfba87a7daf86cd15a1619be7e4b04807ad00fb6c9a45007c5e179e8\": container with ID starting with d75c401adfba87a7daf86cd15a1619be7e4b04807ad00fb6c9a45007c5e179e8 not found: ID does not exist" containerID="d75c401adfba87a7daf86cd15a1619be7e4b04807ad00fb6c9a45007c5e179e8" Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.235792 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d75c401adfba87a7daf86cd15a1619be7e4b04807ad00fb6c9a45007c5e179e8"} err="failed to get container status \"d75c401adfba87a7daf86cd15a1619be7e4b04807ad00fb6c9a45007c5e179e8\": rpc error: code = NotFound desc = could not find container \"d75c401adfba87a7daf86cd15a1619be7e4b04807ad00fb6c9a45007c5e179e8\": container with ID starting with d75c401adfba87a7daf86cd15a1619be7e4b04807ad00fb6c9a45007c5e179e8 not found: ID does not exist" Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.246396 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-hnrtk"] Dec 11 21:54:00 crc kubenswrapper[4956]: I1211 21:54:00.252126 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-hnrtk"] Dec 11 21:54:02 crc kubenswrapper[4956]: I1211 21:54:02.034848 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6bcc8182-6e42-4b00-a247-803f1b9bd1d3" path="/var/lib/kubelet/pods/6bcc8182-6e42-4b00-a247-803f1b9bd1d3/volumes" Dec 11 21:54:46 crc kubenswrapper[4956]: I1211 21:54:46.888374 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 21:54:46 crc kubenswrapper[4956]: I1211 21:54:46.888983 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 21:55:16 crc kubenswrapper[4956]: I1211 21:55:16.887726 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 21:55:16 crc kubenswrapper[4956]: I1211 21:55:16.888373 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 21:55:46 crc kubenswrapper[4956]: I1211 21:55:46.888575 4956 patch_prober.go:28] interesting 
pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 21:55:46 crc kubenswrapper[4956]: I1211 21:55:46.889214 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 21:55:46 crc kubenswrapper[4956]: I1211 21:55:46.889291 4956 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" Dec 11 21:55:46 crc kubenswrapper[4956]: I1211 21:55:46.890185 4956 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b26c68c76a615f74dfaca11d8d29401bc54299569edf6b7ab00d1822c24a1a25"} pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 21:55:46 crc kubenswrapper[4956]: I1211 21:55:46.890314 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" containerID="cri-o://b26c68c76a615f74dfaca11d8d29401bc54299569edf6b7ab00d1822c24a1a25" gracePeriod=600 Dec 11 21:55:47 crc kubenswrapper[4956]: I1211 21:55:47.890676 4956 generic.go:334] "Generic (PLEG): container finished" podID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerID="b26c68c76a615f74dfaca11d8d29401bc54299569edf6b7ab00d1822c24a1a25" exitCode=0 Dec 11 21:55:47 crc kubenswrapper[4956]: I1211 21:55:47.890814 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" event={"ID":"cf61c63b-b06c-4f51-add2-aefe57de751a","Type":"ContainerDied","Data":"b26c68c76a615f74dfaca11d8d29401bc54299569edf6b7ab00d1822c24a1a25"} Dec 11 21:55:47 crc kubenswrapper[4956]: I1211 21:55:47.891266 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" event={"ID":"cf61c63b-b06c-4f51-add2-aefe57de751a","Type":"ContainerStarted","Data":"2913b125d5d4273e7a22b870a21fcdd6061910016396e2d50d698aaca6bf5cbe"} Dec 11 21:55:47 crc kubenswrapper[4956]: I1211 21:55:47.891313 4956 scope.go:117] "RemoveContainer" containerID="14f33798be929c68f53834952ede68aa6c2a866f5a76db2a46703664c9338e9d" Dec 11 21:56:28 crc kubenswrapper[4956]: I1211 21:56:28.204648 4956 scope.go:117] "RemoveContainer" containerID="ffc9556e97d258b01bfb3b9cd94e82f237e03be95ffb33d792ec60994de8aec4" Dec 11 21:56:28 crc kubenswrapper[4956]: I1211 21:56:28.233734 4956 scope.go:117] "RemoveContainer" containerID="81b0ae5b94cba7605e6cda759f7e64ff82d64f2b7d337b0e44d09b64237129d7" Dec 11 21:58:16 crc kubenswrapper[4956]: I1211 21:58:16.888825 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 21:58:16 crc kubenswrapper[4956]: I1211 
21:58:16.889514 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 21:58:28 crc kubenswrapper[4956]: I1211 21:58:28.299557 4956 scope.go:117] "RemoveContainer" containerID="179a068c34a95783cb10f4dd4f07c8804026fc46f3bd5107b59bd26e89e4d41c" Dec 11 21:58:28 crc kubenswrapper[4956]: I1211 21:58:28.316076 4956 scope.go:117] "RemoveContainer" containerID="30d281b059e9fe33c1446ea8f9098753347f01e3980a00fa2f52fd7aefabde95" Dec 11 21:58:46 crc kubenswrapper[4956]: I1211 21:58:46.888087 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 21:58:46 crc kubenswrapper[4956]: I1211 21:58:46.888857 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 21:59:16 crc kubenswrapper[4956]: I1211 21:59:16.888239 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 21:59:16 crc kubenswrapper[4956]: I1211 21:59:16.889020 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 21:59:16 crc kubenswrapper[4956]: I1211 21:59:16.889087 4956 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" Dec 11 21:59:16 crc kubenswrapper[4956]: I1211 21:59:16.889870 4956 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2913b125d5d4273e7a22b870a21fcdd6061910016396e2d50d698aaca6bf5cbe"} pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 21:59:16 crc kubenswrapper[4956]: I1211 21:59:16.889954 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" containerID="cri-o://2913b125d5d4273e7a22b870a21fcdd6061910016396e2d50d698aaca6bf5cbe" gracePeriod=600 Dec 11 21:59:17 crc kubenswrapper[4956]: I1211 21:59:17.241429 4956 generic.go:334] "Generic (PLEG): container finished" podID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerID="2913b125d5d4273e7a22b870a21fcdd6061910016396e2d50d698aaca6bf5cbe" exitCode=0 Dec 11 21:59:17 crc kubenswrapper[4956]: I1211 21:59:17.241525 
4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" event={"ID":"cf61c63b-b06c-4f51-add2-aefe57de751a","Type":"ContainerDied","Data":"2913b125d5d4273e7a22b870a21fcdd6061910016396e2d50d698aaca6bf5cbe"} Dec 11 21:59:17 crc kubenswrapper[4956]: I1211 21:59:17.241780 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" event={"ID":"cf61c63b-b06c-4f51-add2-aefe57de751a","Type":"ContainerStarted","Data":"4499626ee92b2b1ce574f017b854a027fdb33d8effd0a947335164f75f9ce2f0"} Dec 11 21:59:17 crc kubenswrapper[4956]: I1211 21:59:17.241806 4956 scope.go:117] "RemoveContainer" containerID="b26c68c76a615f74dfaca11d8d29401bc54299569edf6b7ab00d1822c24a1a25" Dec 11 22:00:00 crc kubenswrapper[4956]: I1211 22:00:00.203214 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424840-ghmm6"] Dec 11 22:00:00 crc kubenswrapper[4956]: E1211 22:00:00.204284 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bcc8182-6e42-4b00-a247-803f1b9bd1d3" containerName="registry" Dec 11 22:00:00 crc kubenswrapper[4956]: I1211 22:00:00.204306 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bcc8182-6e42-4b00-a247-803f1b9bd1d3" containerName="registry" Dec 11 22:00:00 crc kubenswrapper[4956]: I1211 22:00:00.204458 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bcc8182-6e42-4b00-a247-803f1b9bd1d3" containerName="registry" Dec 11 22:00:00 crc kubenswrapper[4956]: I1211 22:00:00.205042 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424840-ghmm6" Dec 11 22:00:00 crc kubenswrapper[4956]: I1211 22:00:00.207139 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424840-ghmm6"] Dec 11 22:00:00 crc kubenswrapper[4956]: I1211 22:00:00.207994 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 11 22:00:00 crc kubenswrapper[4956]: I1211 22:00:00.208389 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 11 22:00:00 crc kubenswrapper[4956]: I1211 22:00:00.237824 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqr55\" (UniqueName: \"kubernetes.io/projected/5c6c79ab-9970-4b28-83e4-f37951de3525-kube-api-access-kqr55\") pod \"collect-profiles-29424840-ghmm6\" (UID: \"5c6c79ab-9970-4b28-83e4-f37951de3525\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424840-ghmm6" Dec 11 22:00:00 crc kubenswrapper[4956]: I1211 22:00:00.237871 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5c6c79ab-9970-4b28-83e4-f37951de3525-secret-volume\") pod \"collect-profiles-29424840-ghmm6\" (UID: \"5c6c79ab-9970-4b28-83e4-f37951de3525\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424840-ghmm6" Dec 11 22:00:00 crc kubenswrapper[4956]: I1211 22:00:00.237897 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5c6c79ab-9970-4b28-83e4-f37951de3525-config-volume\") pod 
\"collect-profiles-29424840-ghmm6\" (UID: \"5c6c79ab-9970-4b28-83e4-f37951de3525\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424840-ghmm6" Dec 11 22:00:00 crc kubenswrapper[4956]: I1211 22:00:00.339384 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqr55\" (UniqueName: \"kubernetes.io/projected/5c6c79ab-9970-4b28-83e4-f37951de3525-kube-api-access-kqr55\") pod \"collect-profiles-29424840-ghmm6\" (UID: \"5c6c79ab-9970-4b28-83e4-f37951de3525\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424840-ghmm6" Dec 11 22:00:00 crc kubenswrapper[4956]: I1211 22:00:00.339456 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5c6c79ab-9970-4b28-83e4-f37951de3525-secret-volume\") pod \"collect-profiles-29424840-ghmm6\" (UID: \"5c6c79ab-9970-4b28-83e4-f37951de3525\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424840-ghmm6" Dec 11 22:00:00 crc kubenswrapper[4956]: I1211 22:00:00.339541 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5c6c79ab-9970-4b28-83e4-f37951de3525-config-volume\") pod \"collect-profiles-29424840-ghmm6\" (UID: \"5c6c79ab-9970-4b28-83e4-f37951de3525\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424840-ghmm6" Dec 11 22:00:00 crc kubenswrapper[4956]: I1211 22:00:00.340644 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5c6c79ab-9970-4b28-83e4-f37951de3525-config-volume\") pod \"collect-profiles-29424840-ghmm6\" (UID: \"5c6c79ab-9970-4b28-83e4-f37951de3525\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424840-ghmm6" Dec 11 22:00:00 crc kubenswrapper[4956]: I1211 22:00:00.350705 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5c6c79ab-9970-4b28-83e4-f37951de3525-secret-volume\") pod \"collect-profiles-29424840-ghmm6\" (UID: \"5c6c79ab-9970-4b28-83e4-f37951de3525\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424840-ghmm6" Dec 11 22:00:00 crc kubenswrapper[4956]: I1211 22:00:00.357009 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqr55\" (UniqueName: \"kubernetes.io/projected/5c6c79ab-9970-4b28-83e4-f37951de3525-kube-api-access-kqr55\") pod \"collect-profiles-29424840-ghmm6\" (UID: \"5c6c79ab-9970-4b28-83e4-f37951de3525\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424840-ghmm6" Dec 11 22:00:00 crc kubenswrapper[4956]: I1211 22:00:00.550678 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424840-ghmm6" Dec 11 22:00:00 crc kubenswrapper[4956]: I1211 22:00:00.741085 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424840-ghmm6"] Dec 11 22:00:01 crc kubenswrapper[4956]: I1211 22:00:01.521181 4956 generic.go:334] "Generic (PLEG): container finished" podID="5c6c79ab-9970-4b28-83e4-f37951de3525" containerID="a035a355a24d9e11132676d0599733ae654f5677fc7bcd4a2c211dfb8e2bab68" exitCode=0 Dec 11 22:00:01 crc kubenswrapper[4956]: I1211 22:00:01.521238 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424840-ghmm6" event={"ID":"5c6c79ab-9970-4b28-83e4-f37951de3525","Type":"ContainerDied","Data":"a035a355a24d9e11132676d0599733ae654f5677fc7bcd4a2c211dfb8e2bab68"} Dec 11 22:00:01 crc kubenswrapper[4956]: I1211 22:00:01.521547 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424840-ghmm6" event={"ID":"5c6c79ab-9970-4b28-83e4-f37951de3525","Type":"ContainerStarted","Data":"53577b73694e5406ab9970084c6a27ef35a8f96b11b5fbb3974ae32dc82c2125"} Dec 11 22:00:02 crc kubenswrapper[4956]: I1211 22:00:02.762049 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424840-ghmm6" Dec 11 22:00:02 crc kubenswrapper[4956]: I1211 22:00:02.773340 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5c6c79ab-9970-4b28-83e4-f37951de3525-secret-volume\") pod \"5c6c79ab-9970-4b28-83e4-f37951de3525\" (UID: \"5c6c79ab-9970-4b28-83e4-f37951de3525\") " Dec 11 22:00:02 crc kubenswrapper[4956]: I1211 22:00:02.773388 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kqr55\" (UniqueName: \"kubernetes.io/projected/5c6c79ab-9970-4b28-83e4-f37951de3525-kube-api-access-kqr55\") pod \"5c6c79ab-9970-4b28-83e4-f37951de3525\" (UID: \"5c6c79ab-9970-4b28-83e4-f37951de3525\") " Dec 11 22:00:02 crc kubenswrapper[4956]: I1211 22:00:02.773409 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5c6c79ab-9970-4b28-83e4-f37951de3525-config-volume\") pod \"5c6c79ab-9970-4b28-83e4-f37951de3525\" (UID: \"5c6c79ab-9970-4b28-83e4-f37951de3525\") " Dec 11 22:00:02 crc kubenswrapper[4956]: I1211 22:00:02.774165 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c6c79ab-9970-4b28-83e4-f37951de3525-config-volume" (OuterVolumeSpecName: "config-volume") pod "5c6c79ab-9970-4b28-83e4-f37951de3525" (UID: "5c6c79ab-9970-4b28-83e4-f37951de3525"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 22:00:02 crc kubenswrapper[4956]: I1211 22:00:02.778670 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c6c79ab-9970-4b28-83e4-f37951de3525-kube-api-access-kqr55" (OuterVolumeSpecName: "kube-api-access-kqr55") pod "5c6c79ab-9970-4b28-83e4-f37951de3525" (UID: "5c6c79ab-9970-4b28-83e4-f37951de3525"). InnerVolumeSpecName "kube-api-access-kqr55". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:00:02 crc kubenswrapper[4956]: I1211 22:00:02.779046 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c6c79ab-9970-4b28-83e4-f37951de3525-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "5c6c79ab-9970-4b28-83e4-f37951de3525" (UID: "5c6c79ab-9970-4b28-83e4-f37951de3525"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 22:00:02 crc kubenswrapper[4956]: I1211 22:00:02.875017 4956 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5c6c79ab-9970-4b28-83e4-f37951de3525-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:02 crc kubenswrapper[4956]: I1211 22:00:02.875074 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kqr55\" (UniqueName: \"kubernetes.io/projected/5c6c79ab-9970-4b28-83e4-f37951de3525-kube-api-access-kqr55\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:02 crc kubenswrapper[4956]: I1211 22:00:02.875093 4956 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5c6c79ab-9970-4b28-83e4-f37951de3525-config-volume\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:03 crc kubenswrapper[4956]: I1211 22:00:03.539275 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424840-ghmm6" event={"ID":"5c6c79ab-9970-4b28-83e4-f37951de3525","Type":"ContainerDied","Data":"53577b73694e5406ab9970084c6a27ef35a8f96b11b5fbb3974ae32dc82c2125"} Dec 11 22:00:03 crc kubenswrapper[4956]: I1211 22:00:03.539364 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424840-ghmm6" Dec 11 22:00:03 crc kubenswrapper[4956]: I1211 22:00:03.539817 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="53577b73694e5406ab9970084c6a27ef35a8f96b11b5fbb3974ae32dc82c2125" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.454901 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-v52ql"] Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.456112 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="ovn-controller" containerID="cri-o://71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a" gracePeriod=30 Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.456155 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="nbdb" containerID="cri-o://5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0" gracePeriod=30 Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.456234 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="northd" containerID="cri-o://638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b" gracePeriod=30 Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.456230 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" 
podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="ovn-acl-logging" containerID="cri-o://4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8" gracePeriod=30 Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.456268 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="sbdb" containerID="cri-o://25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f" gracePeriod=30 Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.456198 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd" gracePeriod=30 Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.456198 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="kube-rbac-proxy-node" containerID="cri-o://42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292" gracePeriod=30 Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.493085 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="ovnkube-controller" containerID="cri-o://1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268" gracePeriod=30 Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.579571 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-p8slf_3f5c3105-d748-4563-b3f7-a566d31a3031/kube-multus/1.log" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.580360 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-p8slf_3f5c3105-d748-4563-b3f7-a566d31a3031/kube-multus/0.log" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.580408 4956 generic.go:334] "Generic (PLEG): container finished" podID="3f5c3105-d748-4563-b3f7-a566d31a3031" containerID="01bb9d76de97fef018802271d2a048c8bff06a4a38c4f92fbaa85fd1ec91f41e" exitCode=2 Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.580453 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-p8slf" event={"ID":"3f5c3105-d748-4563-b3f7-a566d31a3031","Type":"ContainerDied","Data":"01bb9d76de97fef018802271d2a048c8bff06a4a38c4f92fbaa85fd1ec91f41e"} Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.580499 4956 scope.go:117] "RemoveContainer" containerID="29845b1bac1f3cb16ecd99b0b0f677d99722334721c2f61067ec3db43e60316c" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.581309 4956 scope.go:117] "RemoveContainer" containerID="01bb9d76de97fef018802271d2a048c8bff06a4a38c4f92fbaa85fd1ec91f41e" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.770735 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-v52ql_c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30/ovnkube-controller/2.log" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.773111 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-v52ql_c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30/ovn-acl-logging/0.log" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.773502 4956 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-v52ql_c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30/ovn-controller/0.log" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.773860 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.842917 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-shcn4"] Dec 11 22:00:08 crc kubenswrapper[4956]: E1211 22:00:08.843495 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c6c79ab-9970-4b28-83e4-f37951de3525" containerName="collect-profiles" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.843541 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c6c79ab-9970-4b28-83e4-f37951de3525" containerName="collect-profiles" Dec 11 22:00:08 crc kubenswrapper[4956]: E1211 22:00:08.843555 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="kubecfg-setup" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.843563 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="kubecfg-setup" Dec 11 22:00:08 crc kubenswrapper[4956]: E1211 22:00:08.843577 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="ovnkube-controller" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.843587 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="ovnkube-controller" Dec 11 22:00:08 crc kubenswrapper[4956]: E1211 22:00:08.843594 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="kube-rbac-proxy-ovn-metrics" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.843603 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="kube-rbac-proxy-ovn-metrics" Dec 11 22:00:08 crc kubenswrapper[4956]: E1211 22:00:08.843616 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="ovn-acl-logging" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.843623 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="ovn-acl-logging" Dec 11 22:00:08 crc kubenswrapper[4956]: E1211 22:00:08.843635 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="northd" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.843642 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="northd" Dec 11 22:00:08 crc kubenswrapper[4956]: E1211 22:00:08.843652 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="nbdb" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.843659 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="nbdb" Dec 11 22:00:08 crc kubenswrapper[4956]: E1211 22:00:08.843668 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="ovn-controller" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.843676 4956 
state_mem.go:107] "Deleted CPUSet assignment" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="ovn-controller" Dec 11 22:00:08 crc kubenswrapper[4956]: E1211 22:00:08.843686 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="sbdb" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.843693 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="sbdb" Dec 11 22:00:08 crc kubenswrapper[4956]: E1211 22:00:08.843704 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="ovnkube-controller" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.843710 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="ovnkube-controller" Dec 11 22:00:08 crc kubenswrapper[4956]: E1211 22:00:08.843718 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="kube-rbac-proxy-node" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.843725 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="kube-rbac-proxy-node" Dec 11 22:00:08 crc kubenswrapper[4956]: E1211 22:00:08.843734 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="ovnkube-controller" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.843743 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="ovnkube-controller" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.843878 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="ovnkube-controller" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.843892 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="kube-rbac-proxy-ovn-metrics" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.843907 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="ovnkube-controller" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.843917 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="ovn-acl-logging" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.843926 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="ovn-controller" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.843937 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="kube-rbac-proxy-node" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.843947 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="sbdb" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.843956 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="nbdb" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.843965 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="northd" Dec 11 22:00:08 crc 
kubenswrapper[4956]: I1211 22:00:08.843973 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c6c79ab-9970-4b28-83e4-f37951de3525" containerName="collect-profiles" Dec 11 22:00:08 crc kubenswrapper[4956]: E1211 22:00:08.844087 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="ovnkube-controller" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.844096 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="ovnkube-controller" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.844213 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="ovnkube-controller" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.844226 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerName="ovnkube-controller" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.845941 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.959009 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-env-overrides\") pod \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.959057 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-etc-openvswitch\") pod \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.959078 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-run-ovn-kubernetes\") pod \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.959124 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" (UID: "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.959131 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9n6k6\" (UniqueName: \"kubernetes.io/projected/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-kube-api-access-9n6k6\") pod \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.959166 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" (UID: "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.959558 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-run-systemd\") pod \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.959702 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-var-lib-cni-networks-ovn-kubernetes\") pod \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.959756 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" (UID: "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.959761 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" (UID: "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.959840 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-ovnkube-script-lib\") pod \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.959896 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-var-lib-openvswitch\") pod \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.959947 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-node-log\") pod \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.960004 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" (UID: "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30"). InnerVolumeSpecName "var-lib-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.960063 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-cni-netd\") pod \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.960095 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-node-log" (OuterVolumeSpecName: "node-log") pod "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" (UID: "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.960115 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-kubelet\") pod \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.960138 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" (UID: "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.960167 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-run-ovn\") pod \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.960176 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" (UID: "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.960198 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" (UID: "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.960220 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-cni-bin\") pod \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.960268 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-systemd-units\") pod \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.960270 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" (UID: "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.960294 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" (UID: "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.960271 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" (UID: "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.960326 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-log-socket\") pod \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.960354 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-log-socket" (OuterVolumeSpecName: "log-socket") pod "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" (UID: "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30"). InnerVolumeSpecName "log-socket". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.960407 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-ovn-node-metrics-cert\") pod \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.960451 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-run-openvswitch\") pod \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.960512 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-ovnkube-config\") pod \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.960565 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-slash\") pod \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.960576 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" (UID: "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.960597 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-slash" (OuterVolumeSpecName: "host-slash") pod "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" (UID: "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.960741 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-run-netns\") pod \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\" (UID: \"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30\") " Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.960811 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" (UID: "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.960908 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" (UID: "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.961245 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-host-run-netns\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.961390 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-host-cni-netd\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.961565 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-ovnkube-config\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.961743 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-host-kubelet\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.961963 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-ovnkube-script-lib\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.962121 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-ovn-node-metrics-cert\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.962286 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-run-ovn\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.962344 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-etc-openvswitch\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.962465 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.962639 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4brj\" (UniqueName: \"kubernetes.io/projected/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-kube-api-access-v4brj\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.962862 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-run-systemd\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.963043 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-run-openvswitch\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.963224 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-host-run-ovn-kubernetes\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.963377 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-var-lib-openvswitch\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.963568 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-node-log\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.963849 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-host-cni-bin\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.963934 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-systemd-units\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.963985 4956 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-host-slash\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.964256 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-env-overrides\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.964446 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-log-socket\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.964694 4956 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.964743 4956 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.964801 4956 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.964833 4956 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.964869 4956 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.964900 4956 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.964930 4956 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.964956 4956 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-node-log\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.964982 4956 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: 
\"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.965010 4956 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.965035 4956 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.965060 4956 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.965101 4956 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.965127 4956 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-log-socket\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.965153 4956 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.965171 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" (UID: "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.965179 4956 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.965237 4956 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-host-slash\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.965565 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-kube-api-access-9n6k6" (OuterVolumeSpecName: "kube-api-access-9n6k6") pod "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" (UID: "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30"). InnerVolumeSpecName "kube-api-access-9n6k6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:00:08 crc kubenswrapper[4956]: I1211 22:00:08.980916 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" (UID: "c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066317 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-node-log\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066363 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-host-cni-bin\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066383 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-systemd-units\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066396 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-host-slash\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066414 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-env-overrides\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066433 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-log-socket\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066455 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-host-run-netns\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066472 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-host-cni-netd\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066473 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-host-cni-bin\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066487 4956 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-ovnkube-config\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066553 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-systemd-units\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066575 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-host-kubelet\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066551 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-host-run-netns\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066570 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-host-cni-netd\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066512 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-host-slash\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066620 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-host-kubelet\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066670 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-ovnkube-script-lib\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066658 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-log-socket\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066693 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-ovn-node-metrics-cert\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066735 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-run-ovn\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066786 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-etc-openvswitch\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066812 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066834 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4brj\" (UniqueName: \"kubernetes.io/projected/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-kube-api-access-v4brj\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066836 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-run-ovn\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066859 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-run-systemd\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066864 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-node-log\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066988 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-run-openvswitch\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.067009 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.067023 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-host-run-ovn-kubernetes\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.067041 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-var-lib-openvswitch\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.066875 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-etc-openvswitch\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.067013 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-run-systemd\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.067042 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-run-openvswitch\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.067069 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-host-run-ovn-kubernetes\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.067098 4956 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.067127 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9n6k6\" (UniqueName: \"kubernetes.io/projected/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-kube-api-access-9n6k6\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.067140 4956 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.067099 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-var-lib-openvswitch\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.067547 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-env-overrides\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.067560 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-ovnkube-script-lib\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.073194 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-ovnkube-config\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.074251 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-ovn-node-metrics-cert\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.090434 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4brj\" (UniqueName: \"kubernetes.io/projected/7a1e267b-65c3-4ebf-8d70-c3519fe43c16-kube-api-access-v4brj\") pod \"ovnkube-node-shcn4\" (UID: \"7a1e267b-65c3-4ebf-8d70-c3519fe43c16\") " pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.161669 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:09 crc kubenswrapper[4956]: W1211 22:00:09.180869 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7a1e267b_65c3_4ebf_8d70_c3519fe43c16.slice/crio-9a9a9b0435288aaef74daf46023eaca0567bb3c8ebc1a307ac8fee2faa21975a WatchSource:0}: Error finding container 9a9a9b0435288aaef74daf46023eaca0567bb3c8ebc1a307ac8fee2faa21975a: Status 404 returned error can't find the container with id 9a9a9b0435288aaef74daf46023eaca0567bb3c8ebc1a307ac8fee2faa21975a Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.589428 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-v52ql_c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30/ovnkube-controller/2.log" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.597408 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-v52ql_c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30/ovn-acl-logging/0.log" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.600732 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-v52ql_c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30/ovn-controller/0.log" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.601743 4956 generic.go:334] "Generic (PLEG): container finished" podID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerID="1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268" exitCode=0 Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.601838 4956 generic.go:334] "Generic (PLEG): container finished" podID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerID="25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f" exitCode=0 Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.601820 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" event={"ID":"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30","Type":"ContainerDied","Data":"1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.601910 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" event={"ID":"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30","Type":"ContainerDied","Data":"25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.601958 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" event={"ID":"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30","Type":"ContainerDied","Data":"5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.601861 4956 generic.go:334] "Generic (PLEG): container finished" podID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerID="5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0" exitCode=0 Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.601993 4956 scope.go:117] "RemoveContainer" containerID="1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602019 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" event={"ID":"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30","Type":"ContainerDied","Data":"638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b"} Dec 11 22:00:09 crc 
kubenswrapper[4956]: I1211 22:00:09.602018 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.601999 4956 generic.go:334] "Generic (PLEG): container finished" podID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerID="638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b" exitCode=0 Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602136 4956 generic.go:334] "Generic (PLEG): container finished" podID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerID="dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd" exitCode=0 Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602162 4956 generic.go:334] "Generic (PLEG): container finished" podID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerID="42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292" exitCode=0 Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602185 4956 generic.go:334] "Generic (PLEG): container finished" podID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerID="4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8" exitCode=143 Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602212 4956 generic.go:334] "Generic (PLEG): container finished" podID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" containerID="71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a" exitCode=143 Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602206 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" event={"ID":"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30","Type":"ContainerDied","Data":"dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602277 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" event={"ID":"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30","Type":"ContainerDied","Data":"42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602304 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602323 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602336 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602348 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602359 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602371 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602384 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602399 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602420 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602441 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" event={"ID":"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30","Type":"ContainerDied","Data":"4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602463 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602480 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602493 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602508 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602523 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602539 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602553 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602568 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602582 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602596 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602616 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" event={"ID":"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30","Type":"ContainerDied","Data":"71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602640 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602657 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602671 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602685 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602698 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602714 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602728 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602742 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602756 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602800 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602821 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-v52ql" event={"ID":"c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30","Type":"ContainerDied","Data":"f9b036d90f9d8482bffd0bf02cb9cd9693e51f4eddce514aba0fbceb67a7dad4"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602844 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602861 4956 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602879 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602892 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602903 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602913 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602924 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602934 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602945 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.602955 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.605864 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-p8slf_3f5c3105-d748-4563-b3f7-a566d31a3031/kube-multus/1.log" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.606283 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-p8slf" event={"ID":"3f5c3105-d748-4563-b3f7-a566d31a3031","Type":"ContainerStarted","Data":"fa2a40f6134f312ad0a9536c314bc131d2482ce6ef241c612c51cf3f1cf93281"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.609599 4956 generic.go:334] "Generic (PLEG): container finished" podID="7a1e267b-65c3-4ebf-8d70-c3519fe43c16" containerID="5617038b32df2a361d9d2d999f19ba5688c280ba300209e4fb365792e2e82295" exitCode=0 Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.609655 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" event={"ID":"7a1e267b-65c3-4ebf-8d70-c3519fe43c16","Type":"ContainerDied","Data":"5617038b32df2a361d9d2d999f19ba5688c280ba300209e4fb365792e2e82295"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.609735 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" 
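The kubenswrapper entries above are klog v2 structured-logging output from the kubelet: an I/E/W severity letter, MMDD date, timestamp, PID, source file and line, then a quoted message with key=value pairs. A minimal sketch, assuming only the public k8s.io/klog/v2 API (this is an illustration, not kubelet source; exact value formatting differs by type):

// klog_sketch.go - reproduces the header-plus-key/value shape of the
// "I1211 22:00:09.602384 <pid> pod_container_deletor.go:114] ..." entries.
package main

import (
	"flag"

	"k8s.io/klog/v2"
)

func main() {
	klog.InitFlags(nil)
	_ = flag.Set("logtostderr", "true")
	flag.Parse()

	// Message and containerID value copied from the log above; the map is a
	// stand-in for kubelet's own containerID type.
	klog.InfoS("Failed to issue the request to remove container",
		"containerID", map[string]string{
			"Type": "cri-o",
			"ID":   "4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8",
		})
	klog.Flush()
}

The "SyncLoop (PLEG)" lines are the same mechanism one layer up: the Pod Lifecycle Event Generator relays ContainerDied/ContainerStarted events into the kubelet sync loop, which logs them through InfoS in exactly this shape.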
event={"ID":"7a1e267b-65c3-4ebf-8d70-c3519fe43c16","Type":"ContainerStarted","Data":"9a9a9b0435288aaef74daf46023eaca0567bb3c8ebc1a307ac8fee2faa21975a"} Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.624351 4956 scope.go:117] "RemoveContainer" containerID="0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.647132 4956 scope.go:117] "RemoveContainer" containerID="25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.667902 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-v52ql"] Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.670004 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-v52ql"] Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.687498 4956 scope.go:117] "RemoveContainer" containerID="5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.704162 4956 scope.go:117] "RemoveContainer" containerID="638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.719473 4956 scope.go:117] "RemoveContainer" containerID="dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.731680 4956 scope.go:117] "RemoveContainer" containerID="42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.743247 4956 scope.go:117] "RemoveContainer" containerID="4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.759982 4956 scope.go:117] "RemoveContainer" containerID="71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.779517 4956 scope.go:117] "RemoveContainer" containerID="dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.813485 4956 scope.go:117] "RemoveContainer" containerID="1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268" Dec 11 22:00:09 crc kubenswrapper[4956]: E1211 22:00:09.814138 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268\": container with ID starting with 1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268 not found: ID does not exist" containerID="1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.814276 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268"} err="failed to get container status \"1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268\": rpc error: code = NotFound desc = could not find container \"1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268\": container with ID starting with 1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268 not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.814305 4956 scope.go:117] "RemoveContainer" containerID="0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193" Dec 11 22:00:09 crc 
kubenswrapper[4956]: E1211 22:00:09.814546 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193\": container with ID starting with 0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193 not found: ID does not exist" containerID="0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.814566 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193"} err="failed to get container status \"0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193\": rpc error: code = NotFound desc = could not find container \"0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193\": container with ID starting with 0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193 not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.814582 4956 scope.go:117] "RemoveContainer" containerID="25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f" Dec 11 22:00:09 crc kubenswrapper[4956]: E1211 22:00:09.814790 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\": container with ID starting with 25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f not found: ID does not exist" containerID="25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.814813 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f"} err="failed to get container status \"25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\": rpc error: code = NotFound desc = could not find container \"25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\": container with ID starting with 25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.814827 4956 scope.go:117] "RemoveContainer" containerID="5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0" Dec 11 22:00:09 crc kubenswrapper[4956]: E1211 22:00:09.815004 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\": container with ID starting with 5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0 not found: ID does not exist" containerID="5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.815020 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0"} err="failed to get container status \"5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\": rpc error: code = NotFound desc = could not find container \"5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\": container with ID starting with 5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0 not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: 
I1211 22:00:09.815039 4956 scope.go:117] "RemoveContainer" containerID="638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b" Dec 11 22:00:09 crc kubenswrapper[4956]: E1211 22:00:09.820381 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\": container with ID starting with 638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b not found: ID does not exist" containerID="638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.820410 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b"} err="failed to get container status \"638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\": rpc error: code = NotFound desc = could not find container \"638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\": container with ID starting with 638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.820425 4956 scope.go:117] "RemoveContainer" containerID="dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd" Dec 11 22:00:09 crc kubenswrapper[4956]: E1211 22:00:09.820840 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\": container with ID starting with dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd not found: ID does not exist" containerID="dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.820864 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd"} err="failed to get container status \"dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\": rpc error: code = NotFound desc = could not find container \"dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\": container with ID starting with dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.820883 4956 scope.go:117] "RemoveContainer" containerID="42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292" Dec 11 22:00:09 crc kubenswrapper[4956]: E1211 22:00:09.826071 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\": container with ID starting with 42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292 not found: ID does not exist" containerID="42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.826125 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292"} err="failed to get container status \"42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\": rpc error: code = NotFound desc = could not find container \"42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\": container 
with ID starting with 42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292 not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.826153 4956 scope.go:117] "RemoveContainer" containerID="4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8" Dec 11 22:00:09 crc kubenswrapper[4956]: E1211 22:00:09.826615 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\": container with ID starting with 4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8 not found: ID does not exist" containerID="4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.826644 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8"} err="failed to get container status \"4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\": rpc error: code = NotFound desc = could not find container \"4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\": container with ID starting with 4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8 not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.829810 4956 scope.go:117] "RemoveContainer" containerID="71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a" Dec 11 22:00:09 crc kubenswrapper[4956]: E1211 22:00:09.830324 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\": container with ID starting with 71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a not found: ID does not exist" containerID="71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.830371 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a"} err="failed to get container status \"71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\": rpc error: code = NotFound desc = could not find container \"71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\": container with ID starting with 71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.830402 4956 scope.go:117] "RemoveContainer" containerID="dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb" Dec 11 22:00:09 crc kubenswrapper[4956]: E1211 22:00:09.830766 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\": container with ID starting with dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb not found: ID does not exist" containerID="dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.830810 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb"} err="failed to get container status 
\"dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\": rpc error: code = NotFound desc = could not find container \"dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\": container with ID starting with dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.830832 4956 scope.go:117] "RemoveContainer" containerID="1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.831524 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268"} err="failed to get container status \"1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268\": rpc error: code = NotFound desc = could not find container \"1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268\": container with ID starting with 1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268 not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.831549 4956 scope.go:117] "RemoveContainer" containerID="0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.831867 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193"} err="failed to get container status \"0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193\": rpc error: code = NotFound desc = could not find container \"0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193\": container with ID starting with 0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193 not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.831896 4956 scope.go:117] "RemoveContainer" containerID="25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.832652 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f"} err="failed to get container status \"25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\": rpc error: code = NotFound desc = could not find container \"25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\": container with ID starting with 25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.832670 4956 scope.go:117] "RemoveContainer" containerID="5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.832955 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0"} err="failed to get container status \"5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\": rpc error: code = NotFound desc = could not find container \"5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\": container with ID starting with 5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0 not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.832973 4956 scope.go:117] "RemoveContainer" 
containerID="638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.833240 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b"} err="failed to get container status \"638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\": rpc error: code = NotFound desc = could not find container \"638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\": container with ID starting with 638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.833257 4956 scope.go:117] "RemoveContainer" containerID="dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.833512 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd"} err="failed to get container status \"dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\": rpc error: code = NotFound desc = could not find container \"dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\": container with ID starting with dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.833529 4956 scope.go:117] "RemoveContainer" containerID="42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.833720 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292"} err="failed to get container status \"42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\": rpc error: code = NotFound desc = could not find container \"42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\": container with ID starting with 42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292 not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.833736 4956 scope.go:117] "RemoveContainer" containerID="4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.833928 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8"} err="failed to get container status \"4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\": rpc error: code = NotFound desc = could not find container \"4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\": container with ID starting with 4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8 not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.833952 4956 scope.go:117] "RemoveContainer" containerID="71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.835142 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a"} err="failed to get container status \"71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\": rpc error: code = NotFound desc = could not find 
container \"71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\": container with ID starting with 71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.835155 4956 scope.go:117] "RemoveContainer" containerID="dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.835965 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb"} err="failed to get container status \"dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\": rpc error: code = NotFound desc = could not find container \"dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\": container with ID starting with dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.835990 4956 scope.go:117] "RemoveContainer" containerID="1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.837298 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268"} err="failed to get container status \"1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268\": rpc error: code = NotFound desc = could not find container \"1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268\": container with ID starting with 1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268 not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.837337 4956 scope.go:117] "RemoveContainer" containerID="0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.837950 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193"} err="failed to get container status \"0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193\": rpc error: code = NotFound desc = could not find container \"0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193\": container with ID starting with 0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193 not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.837975 4956 scope.go:117] "RemoveContainer" containerID="25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.838314 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f"} err="failed to get container status \"25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\": rpc error: code = NotFound desc = could not find container \"25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\": container with ID starting with 25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.838332 4956 scope.go:117] "RemoveContainer" containerID="5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.838902 4956 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0"} err="failed to get container status \"5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\": rpc error: code = NotFound desc = could not find container \"5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\": container with ID starting with 5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0 not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.838936 4956 scope.go:117] "RemoveContainer" containerID="638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.839303 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b"} err="failed to get container status \"638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\": rpc error: code = NotFound desc = could not find container \"638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\": container with ID starting with 638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.839323 4956 scope.go:117] "RemoveContainer" containerID="dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.841447 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd"} err="failed to get container status \"dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\": rpc error: code = NotFound desc = could not find container \"dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\": container with ID starting with dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.841482 4956 scope.go:117] "RemoveContainer" containerID="42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.842126 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292"} err="failed to get container status \"42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\": rpc error: code = NotFound desc = could not find container \"42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\": container with ID starting with 42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292 not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.842153 4956 scope.go:117] "RemoveContainer" containerID="4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.843889 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8"} err="failed to get container status \"4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\": rpc error: code = NotFound desc = could not find container \"4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\": container with ID starting with 
4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8 not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.843916 4956 scope.go:117] "RemoveContainer" containerID="71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.844405 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a"} err="failed to get container status \"71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\": rpc error: code = NotFound desc = could not find container \"71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\": container with ID starting with 71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.844423 4956 scope.go:117] "RemoveContainer" containerID="dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.845080 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb"} err="failed to get container status \"dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\": rpc error: code = NotFound desc = could not find container \"dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\": container with ID starting with dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.845110 4956 scope.go:117] "RemoveContainer" containerID="1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.845474 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268"} err="failed to get container status \"1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268\": rpc error: code = NotFound desc = could not find container \"1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268\": container with ID starting with 1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268 not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.845526 4956 scope.go:117] "RemoveContainer" containerID="0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.845994 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193"} err="failed to get container status \"0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193\": rpc error: code = NotFound desc = could not find container \"0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193\": container with ID starting with 0c99d9c1b68bc732c97d1f9912791c6b7a4d17ac5a6cf0f1b6db39f732233193 not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.846129 4956 scope.go:117] "RemoveContainer" containerID="25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.846749 4956 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f"} err="failed to get container status \"25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\": rpc error: code = NotFound desc = could not find container \"25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f\": container with ID starting with 25cd173530711ff0e798da59eb61c540fd42d106bcca7e8fc1f07b0f24d4e18f not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.846794 4956 scope.go:117] "RemoveContainer" containerID="5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.847077 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0"} err="failed to get container status \"5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\": rpc error: code = NotFound desc = could not find container \"5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0\": container with ID starting with 5beb5860d059ce465baedad8b456e8c40e5b7ee8623a994d0f1c6ffd3c9ed5b0 not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.847107 4956 scope.go:117] "RemoveContainer" containerID="638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.848083 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b"} err="failed to get container status \"638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\": rpc error: code = NotFound desc = could not find container \"638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b\": container with ID starting with 638c6c4d6e61c923253da37b9215b508e8d17847ca7fcf54618b0ba7dddce75b not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.848126 4956 scope.go:117] "RemoveContainer" containerID="dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.848588 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd"} err="failed to get container status \"dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\": rpc error: code = NotFound desc = could not find container \"dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd\": container with ID starting with dc0db5f8cd27028e972982cc386698df61a0a8ce6bc585284b69f384feb336cd not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.848614 4956 scope.go:117] "RemoveContainer" containerID="42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.849196 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292"} err="failed to get container status \"42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\": rpc error: code = NotFound desc = could not find container \"42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292\": container with ID starting with 42e3745ea49b90f20f43773a4ccc9a4e3fcb8cf7bda928645d0c128cf9580292 not found: ID does not exist" Dec 
11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.849293 4956 scope.go:117] "RemoveContainer" containerID="4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.849760 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8"} err="failed to get container status \"4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\": rpc error: code = NotFound desc = could not find container \"4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8\": container with ID starting with 4045415196ec24f713dca45bc18e7687541421da6b2fd5cb183e2107fac4bda8 not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.849973 4956 scope.go:117] "RemoveContainer" containerID="71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.850387 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a"} err="failed to get container status \"71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\": rpc error: code = NotFound desc = could not find container \"71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a\": container with ID starting with 71781cf0a0532df32935758af57c7dfe354e533f2007e68c65d0d89ad03d771a not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.850413 4956 scope.go:117] "RemoveContainer" containerID="dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.850705 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb"} err="failed to get container status \"dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\": rpc error: code = NotFound desc = could not find container \"dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb\": container with ID starting with dfa0e85f859e8c8fe6ed9757f9a44891a2268ab6889683e99c56f3d6bd33b6bb not found: ID does not exist" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.850733 4956 scope.go:117] "RemoveContainer" containerID="1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268" Dec 11 22:00:09 crc kubenswrapper[4956]: I1211 22:00:09.851062 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268"} err="failed to get container status \"1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268\": rpc error: code = NotFound desc = could not find container \"1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268\": container with ID starting with 1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268 not found: ID does not exist" Dec 11 22:00:10 crc kubenswrapper[4956]: I1211 22:00:10.027299 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30" path="/var/lib/kubelet/pods/c5a2674b-3cd8-4d21-bdba-2e3d0bfcab30/volumes" Dec 11 22:00:10 crc kubenswrapper[4956]: I1211 22:00:10.620064 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" 
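The churn above is the deleted ovnkube-node-v52ql pod being cleaned up: several PLEG events each queue a cleanup pass over the same dead containers, so every pass after the first asks the runtime for ContainerStatus, gets gRPC NotFound from CRI-O, and logs "DeleteContainer returned error" before moving on. A sketch of that NotFound check under stated assumptions (my illustration of the pattern, not kubelet source; only the public grpc status/codes API is used):

// notfound_sketch.go - treat a gRPC NotFound from the runtime as
// "container already removed", the benign outcome seen repeatedly above.
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// isNotFound reports whether err carries codes.NotFound, the code in every
// "ContainerStatus from runtime service failed" entry in this log.
func isNotFound(err error) bool {
	s, ok := status.FromError(err)
	return ok && s.Code() == codes.NotFound
}

func main() {
	// Simulated runtime reply for an already-deleted container ID from the log.
	err := status.Errorf(codes.NotFound,
		"could not find container %q: ID does not exist",
		"1661892711e1bf3a3309935f295c30346d98a2be6471b86a8b94fe3ecc09a268")

	if isNotFound(err) {
		fmt.Println("NotFound => already removed; log and continue")
	}
}

Because the removal is idempotent, the repeated RemoveContainer/NotFound pairs are noisy but harmless; the sequence ends once the pod's volumes directory is reaped ("Cleaned up orphaned pod volumes dir" above).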
event={"ID":"7a1e267b-65c3-4ebf-8d70-c3519fe43c16","Type":"ContainerStarted","Data":"6b621ae901f5661b2a54d8b0ae10c54421ee1b41dff660ec47064d98051b25b7"} Dec 11 22:00:10 crc kubenswrapper[4956]: I1211 22:00:10.620399 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" event={"ID":"7a1e267b-65c3-4ebf-8d70-c3519fe43c16","Type":"ContainerStarted","Data":"be532d376a70bc0b7ba8d354997a41badabec3a0eda392faaf6cf1bfee8acc3b"} Dec 11 22:00:10 crc kubenswrapper[4956]: I1211 22:00:10.620416 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" event={"ID":"7a1e267b-65c3-4ebf-8d70-c3519fe43c16","Type":"ContainerStarted","Data":"26f5c9327f711ea9c157c6bb0ac42b321d6cda36d99d7d039f5906cf99c2cd8d"} Dec 11 22:00:10 crc kubenswrapper[4956]: I1211 22:00:10.620430 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" event={"ID":"7a1e267b-65c3-4ebf-8d70-c3519fe43c16","Type":"ContainerStarted","Data":"e470e84df17c9cc103caf15cd1ffa080e661682fb0a175c8a360ea5a1cdce069"} Dec 11 22:00:10 crc kubenswrapper[4956]: I1211 22:00:10.620442 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" event={"ID":"7a1e267b-65c3-4ebf-8d70-c3519fe43c16","Type":"ContainerStarted","Data":"faad21b2f73299c7be20632d46b6ec77af83a222510063d8795ba1653e233d6c"} Dec 11 22:00:10 crc kubenswrapper[4956]: I1211 22:00:10.620460 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" event={"ID":"7a1e267b-65c3-4ebf-8d70-c3519fe43c16","Type":"ContainerStarted","Data":"139e5bb12efe0563a901de59d8a1b83d7e28264d01968fd00a5d35d77eaf962b"} Dec 11 22:00:12 crc kubenswrapper[4956]: I1211 22:00:12.643121 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" event={"ID":"7a1e267b-65c3-4ebf-8d70-c3519fe43c16","Type":"ContainerStarted","Data":"006c48c6cd7a14df1eb588cd8ccb9475a6ff4c1c69087e692e874734ae79b5a2"} Dec 11 22:00:15 crc kubenswrapper[4956]: I1211 22:00:15.665487 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" event={"ID":"7a1e267b-65c3-4ebf-8d70-c3519fe43c16","Type":"ContainerStarted","Data":"3d0efd2d349215734168b268c45d2ca4c42822ef24d99fd59fb1664b5f0e1150"} Dec 11 22:00:15 crc kubenswrapper[4956]: I1211 22:00:15.666172 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:15 crc kubenswrapper[4956]: I1211 22:00:15.666190 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:15 crc kubenswrapper[4956]: I1211 22:00:15.666204 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:15 crc kubenswrapper[4956]: I1211 22:00:15.693724 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:15 crc kubenswrapper[4956]: I1211 22:00:15.693807 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:15 crc kubenswrapper[4956]: I1211 22:00:15.701837 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" podStartSLOduration=7.701816786 
podStartE2EDuration="7.701816786s" podCreationTimestamp="2025-12-11 22:00:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 22:00:15.698421434 +0000 UTC m=+708.142799594" watchObservedRunningTime="2025-12-11 22:00:15.701816786 +0000 UTC m=+708.146194946" Dec 11 22:00:19 crc kubenswrapper[4956]: I1211 22:00:19.707963 4956 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 11 22:00:34 crc kubenswrapper[4956]: I1211 22:00:34.448326 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt"] Dec 11 22:00:34 crc kubenswrapper[4956]: I1211 22:00:34.450070 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt" Dec 11 22:00:34 crc kubenswrapper[4956]: I1211 22:00:34.453499 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 11 22:00:34 crc kubenswrapper[4956]: I1211 22:00:34.459016 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt"] Dec 11 22:00:34 crc kubenswrapper[4956]: I1211 22:00:34.557643 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b8512a22-f9f2-4250-93b8-c125367cc1ad-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt\" (UID: \"b8512a22-f9f2-4250-93b8-c125367cc1ad\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt" Dec 11 22:00:34 crc kubenswrapper[4956]: I1211 22:00:34.557705 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59x4g\" (UniqueName: \"kubernetes.io/projected/b8512a22-f9f2-4250-93b8-c125367cc1ad-kube-api-access-59x4g\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt\" (UID: \"b8512a22-f9f2-4250-93b8-c125367cc1ad\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt" Dec 11 22:00:34 crc kubenswrapper[4956]: I1211 22:00:34.557725 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b8512a22-f9f2-4250-93b8-c125367cc1ad-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt\" (UID: \"b8512a22-f9f2-4250-93b8-c125367cc1ad\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt" Dec 11 22:00:34 crc kubenswrapper[4956]: I1211 22:00:34.658395 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b8512a22-f9f2-4250-93b8-c125367cc1ad-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt\" (UID: \"b8512a22-f9f2-4250-93b8-c125367cc1ad\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt" Dec 11 22:00:34 crc kubenswrapper[4956]: I1211 22:00:34.658446 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59x4g\" (UniqueName: \"kubernetes.io/projected/b8512a22-f9f2-4250-93b8-c125367cc1ad-kube-api-access-59x4g\") 
pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt\" (UID: \"b8512a22-f9f2-4250-93b8-c125367cc1ad\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt" Dec 11 22:00:34 crc kubenswrapper[4956]: I1211 22:00:34.658468 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b8512a22-f9f2-4250-93b8-c125367cc1ad-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt\" (UID: \"b8512a22-f9f2-4250-93b8-c125367cc1ad\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt" Dec 11 22:00:34 crc kubenswrapper[4956]: I1211 22:00:34.658995 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b8512a22-f9f2-4250-93b8-c125367cc1ad-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt\" (UID: \"b8512a22-f9f2-4250-93b8-c125367cc1ad\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt" Dec 11 22:00:34 crc kubenswrapper[4956]: I1211 22:00:34.659433 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b8512a22-f9f2-4250-93b8-c125367cc1ad-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt\" (UID: \"b8512a22-f9f2-4250-93b8-c125367cc1ad\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt" Dec 11 22:00:34 crc kubenswrapper[4956]: I1211 22:00:34.680127 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59x4g\" (UniqueName: \"kubernetes.io/projected/b8512a22-f9f2-4250-93b8-c125367cc1ad-kube-api-access-59x4g\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt\" (UID: \"b8512a22-f9f2-4250-93b8-c125367cc1ad\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt" Dec 11 22:00:34 crc kubenswrapper[4956]: I1211 22:00:34.773443 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt" Dec 11 22:00:34 crc kubenswrapper[4956]: I1211 22:00:34.953250 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt"] Dec 11 22:00:35 crc kubenswrapper[4956]: I1211 22:00:35.780757 4956 generic.go:334] "Generic (PLEG): container finished" podID="b8512a22-f9f2-4250-93b8-c125367cc1ad" containerID="98517271ec18b506a1ae99af0763b2bc56677494e526c5812fcf8bd0b2c94c64" exitCode=0 Dec 11 22:00:35 crc kubenswrapper[4956]: I1211 22:00:35.780882 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt" event={"ID":"b8512a22-f9f2-4250-93b8-c125367cc1ad","Type":"ContainerDied","Data":"98517271ec18b506a1ae99af0763b2bc56677494e526c5812fcf8bd0b2c94c64"} Dec 11 22:00:35 crc kubenswrapper[4956]: I1211 22:00:35.781137 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt" event={"ID":"b8512a22-f9f2-4250-93b8-c125367cc1ad","Type":"ContainerStarted","Data":"76be4a68a8b887c47339e930070eb31e7707fd57cee715294d11a74007836ff0"} Dec 11 22:00:35 crc kubenswrapper[4956]: I1211 22:00:35.782432 4956 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 22:00:36 crc kubenswrapper[4956]: I1211 22:00:36.787403 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-z6cd6"] Dec 11 22:00:36 crc kubenswrapper[4956]: I1211 22:00:36.788915 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z6cd6" Dec 11 22:00:36 crc kubenswrapper[4956]: I1211 22:00:36.799679 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z6cd6"] Dec 11 22:00:36 crc kubenswrapper[4956]: I1211 22:00:36.886411 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4v4d\" (UniqueName: \"kubernetes.io/projected/a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe-kube-api-access-v4v4d\") pod \"redhat-operators-z6cd6\" (UID: \"a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe\") " pod="openshift-marketplace/redhat-operators-z6cd6" Dec 11 22:00:36 crc kubenswrapper[4956]: I1211 22:00:36.886453 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe-utilities\") pod \"redhat-operators-z6cd6\" (UID: \"a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe\") " pod="openshift-marketplace/redhat-operators-z6cd6" Dec 11 22:00:36 crc kubenswrapper[4956]: I1211 22:00:36.886497 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe-catalog-content\") pod \"redhat-operators-z6cd6\" (UID: \"a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe\") " pod="openshift-marketplace/redhat-operators-z6cd6" Dec 11 22:00:36 crc kubenswrapper[4956]: I1211 22:00:36.987933 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4v4d\" (UniqueName: \"kubernetes.io/projected/a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe-kube-api-access-v4v4d\") pod \"redhat-operators-z6cd6\" (UID: 
\"a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe\") " pod="openshift-marketplace/redhat-operators-z6cd6" Dec 11 22:00:36 crc kubenswrapper[4956]: I1211 22:00:36.987987 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe-utilities\") pod \"redhat-operators-z6cd6\" (UID: \"a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe\") " pod="openshift-marketplace/redhat-operators-z6cd6" Dec 11 22:00:36 crc kubenswrapper[4956]: I1211 22:00:36.988050 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe-catalog-content\") pod \"redhat-operators-z6cd6\" (UID: \"a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe\") " pod="openshift-marketplace/redhat-operators-z6cd6" Dec 11 22:00:36 crc kubenswrapper[4956]: I1211 22:00:36.988710 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe-catalog-content\") pod \"redhat-operators-z6cd6\" (UID: \"a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe\") " pod="openshift-marketplace/redhat-operators-z6cd6" Dec 11 22:00:36 crc kubenswrapper[4956]: I1211 22:00:36.988830 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe-utilities\") pod \"redhat-operators-z6cd6\" (UID: \"a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe\") " pod="openshift-marketplace/redhat-operators-z6cd6" Dec 11 22:00:37 crc kubenswrapper[4956]: I1211 22:00:37.008734 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4v4d\" (UniqueName: \"kubernetes.io/projected/a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe-kube-api-access-v4v4d\") pod \"redhat-operators-z6cd6\" (UID: \"a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe\") " pod="openshift-marketplace/redhat-operators-z6cd6" Dec 11 22:00:37 crc kubenswrapper[4956]: I1211 22:00:37.146528 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z6cd6" Dec 11 22:00:37 crc kubenswrapper[4956]: I1211 22:00:37.327393 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z6cd6"] Dec 11 22:00:37 crc kubenswrapper[4956]: W1211 22:00:37.332093 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda003c861_9ed1_46ac_8fd0_2cd0ec7e2afe.slice/crio-42469eb4d53ecd952138b4445530d5dfb0ca98e75c64720d0192e73c08e87de4 WatchSource:0}: Error finding container 42469eb4d53ecd952138b4445530d5dfb0ca98e75c64720d0192e73c08e87de4: Status 404 returned error can't find the container with id 42469eb4d53ecd952138b4445530d5dfb0ca98e75c64720d0192e73c08e87de4 Dec 11 22:00:37 crc kubenswrapper[4956]: I1211 22:00:37.800757 4956 generic.go:334] "Generic (PLEG): container finished" podID="a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe" containerID="d821dce1c8009b15222281559af44d0014653ab262beddd639e1d3477b93f883" exitCode=0 Dec 11 22:00:37 crc kubenswrapper[4956]: I1211 22:00:37.801722 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z6cd6" event={"ID":"a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe","Type":"ContainerDied","Data":"d821dce1c8009b15222281559af44d0014653ab262beddd639e1d3477b93f883"} Dec 11 22:00:37 crc kubenswrapper[4956]: I1211 22:00:37.801855 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z6cd6" event={"ID":"a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe","Type":"ContainerStarted","Data":"42469eb4d53ecd952138b4445530d5dfb0ca98e75c64720d0192e73c08e87de4"} Dec 11 22:00:37 crc kubenswrapper[4956]: I1211 22:00:37.803193 4956 generic.go:334] "Generic (PLEG): container finished" podID="b8512a22-f9f2-4250-93b8-c125367cc1ad" containerID="77268ca2343f110972f1ffbffd6eba5712c2503c8364d1f5b477b01e3efa32c0" exitCode=0 Dec 11 22:00:37 crc kubenswrapper[4956]: I1211 22:00:37.803232 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt" event={"ID":"b8512a22-f9f2-4250-93b8-c125367cc1ad","Type":"ContainerDied","Data":"77268ca2343f110972f1ffbffd6eba5712c2503c8364d1f5b477b01e3efa32c0"} Dec 11 22:00:38 crc kubenswrapper[4956]: I1211 22:00:38.817613 4956 generic.go:334] "Generic (PLEG): container finished" podID="b8512a22-f9f2-4250-93b8-c125367cc1ad" containerID="f4f55fb188b0731ea1d09262f5be53e2019a06c768e431a5c40c50a627214a8d" exitCode=0 Dec 11 22:00:38 crc kubenswrapper[4956]: I1211 22:00:38.817749 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt" event={"ID":"b8512a22-f9f2-4250-93b8-c125367cc1ad","Type":"ContainerDied","Data":"f4f55fb188b0731ea1d09262f5be53e2019a06c768e431a5c40c50a627214a8d"} Dec 11 22:00:38 crc kubenswrapper[4956]: I1211 22:00:38.820575 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z6cd6" event={"ID":"a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe","Type":"ContainerStarted","Data":"685477a1fe20e29961c093f3881f33ec1f060d259d0584438ab9e013c0f7671b"} Dec 11 22:00:39 crc kubenswrapper[4956]: I1211 22:00:39.186231 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-shcn4" Dec 11 22:00:39 crc kubenswrapper[4956]: I1211 22:00:39.828092 4956 generic.go:334] "Generic (PLEG): container finished" 
podID="a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe" containerID="685477a1fe20e29961c093f3881f33ec1f060d259d0584438ab9e013c0f7671b" exitCode=0 Dec 11 22:00:39 crc kubenswrapper[4956]: I1211 22:00:39.828154 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z6cd6" event={"ID":"a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe","Type":"ContainerDied","Data":"685477a1fe20e29961c093f3881f33ec1f060d259d0584438ab9e013c0f7671b"} Dec 11 22:00:40 crc kubenswrapper[4956]: I1211 22:00:40.065878 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt" Dec 11 22:00:40 crc kubenswrapper[4956]: I1211 22:00:40.133580 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b8512a22-f9f2-4250-93b8-c125367cc1ad-util\") pod \"b8512a22-f9f2-4250-93b8-c125367cc1ad\" (UID: \"b8512a22-f9f2-4250-93b8-c125367cc1ad\") " Dec 11 22:00:40 crc kubenswrapper[4956]: I1211 22:00:40.133665 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59x4g\" (UniqueName: \"kubernetes.io/projected/b8512a22-f9f2-4250-93b8-c125367cc1ad-kube-api-access-59x4g\") pod \"b8512a22-f9f2-4250-93b8-c125367cc1ad\" (UID: \"b8512a22-f9f2-4250-93b8-c125367cc1ad\") " Dec 11 22:00:40 crc kubenswrapper[4956]: I1211 22:00:40.133702 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b8512a22-f9f2-4250-93b8-c125367cc1ad-bundle\") pod \"b8512a22-f9f2-4250-93b8-c125367cc1ad\" (UID: \"b8512a22-f9f2-4250-93b8-c125367cc1ad\") " Dec 11 22:00:40 crc kubenswrapper[4956]: I1211 22:00:40.134761 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8512a22-f9f2-4250-93b8-c125367cc1ad-bundle" (OuterVolumeSpecName: "bundle") pod "b8512a22-f9f2-4250-93b8-c125367cc1ad" (UID: "b8512a22-f9f2-4250-93b8-c125367cc1ad"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:00:40 crc kubenswrapper[4956]: I1211 22:00:40.139807 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8512a22-f9f2-4250-93b8-c125367cc1ad-kube-api-access-59x4g" (OuterVolumeSpecName: "kube-api-access-59x4g") pod "b8512a22-f9f2-4250-93b8-c125367cc1ad" (UID: "b8512a22-f9f2-4250-93b8-c125367cc1ad"). InnerVolumeSpecName "kube-api-access-59x4g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:00:40 crc kubenswrapper[4956]: I1211 22:00:40.146390 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8512a22-f9f2-4250-93b8-c125367cc1ad-util" (OuterVolumeSpecName: "util") pod "b8512a22-f9f2-4250-93b8-c125367cc1ad" (UID: "b8512a22-f9f2-4250-93b8-c125367cc1ad"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:00:40 crc kubenswrapper[4956]: I1211 22:00:40.235670 4956 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b8512a22-f9f2-4250-93b8-c125367cc1ad-util\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:40 crc kubenswrapper[4956]: I1211 22:00:40.235704 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59x4g\" (UniqueName: \"kubernetes.io/projected/b8512a22-f9f2-4250-93b8-c125367cc1ad-kube-api-access-59x4g\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:40 crc kubenswrapper[4956]: I1211 22:00:40.235719 4956 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b8512a22-f9f2-4250-93b8-c125367cc1ad-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:40 crc kubenswrapper[4956]: I1211 22:00:40.836918 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z6cd6" event={"ID":"a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe","Type":"ContainerStarted","Data":"b5aca53f0759ed5e19f0915b313d8f8f6c5934d9fae76673c4b3355fb5f39f32"} Dec 11 22:00:40 crc kubenswrapper[4956]: I1211 22:00:40.839784 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt" event={"ID":"b8512a22-f9f2-4250-93b8-c125367cc1ad","Type":"ContainerDied","Data":"76be4a68a8b887c47339e930070eb31e7707fd57cee715294d11a74007836ff0"} Dec 11 22:00:40 crc kubenswrapper[4956]: I1211 22:00:40.839812 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="76be4a68a8b887c47339e930070eb31e7707fd57cee715294d11a74007836ff0" Dec 11 22:00:40 crc kubenswrapper[4956]: I1211 22:00:40.839848 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt" Dec 11 22:00:40 crc kubenswrapper[4956]: I1211 22:00:40.853492 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-z6cd6" podStartSLOduration=2.085773107 podStartE2EDuration="4.853471903s" podCreationTimestamp="2025-12-11 22:00:36 +0000 UTC" firstStartedPulling="2025-12-11 22:00:37.802671338 +0000 UTC m=+730.247049488" lastFinishedPulling="2025-12-11 22:00:40.570370094 +0000 UTC m=+733.014748284" observedRunningTime="2025-12-11 22:00:40.850926193 +0000 UTC m=+733.295304363" watchObservedRunningTime="2025-12-11 22:00:40.853471903 +0000 UTC m=+733.297850053" Dec 11 22:00:47 crc kubenswrapper[4956]: I1211 22:00:47.147043 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-z6cd6" Dec 11 22:00:47 crc kubenswrapper[4956]: I1211 22:00:47.147416 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-z6cd6" Dec 11 22:00:47 crc kubenswrapper[4956]: I1211 22:00:47.187402 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-z6cd6" Dec 11 22:00:47 crc kubenswrapper[4956]: I1211 22:00:47.907313 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-z6cd6" Dec 11 22:00:48 crc kubenswrapper[4956]: I1211 22:00:48.977117 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z6cd6"] Dec 11 22:00:49 crc kubenswrapper[4956]: I1211 22:00:49.883960 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-z6cd6" podUID="a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe" containerName="registry-server" containerID="cri-o://b5aca53f0759ed5e19f0915b313d8f8f6c5934d9fae76673c4b3355fb5f39f32" gracePeriod=2 Dec 11 22:00:51 crc kubenswrapper[4956]: I1211 22:00:51.863915 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-59698c9c66-vrvkn"] Dec 11 22:00:51 crc kubenswrapper[4956]: E1211 22:00:51.864394 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8512a22-f9f2-4250-93b8-c125367cc1ad" containerName="util" Dec 11 22:00:51 crc kubenswrapper[4956]: I1211 22:00:51.864406 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8512a22-f9f2-4250-93b8-c125367cc1ad" containerName="util" Dec 11 22:00:51 crc kubenswrapper[4956]: E1211 22:00:51.864427 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8512a22-f9f2-4250-93b8-c125367cc1ad" containerName="pull" Dec 11 22:00:51 crc kubenswrapper[4956]: I1211 22:00:51.864432 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8512a22-f9f2-4250-93b8-c125367cc1ad" containerName="pull" Dec 11 22:00:51 crc kubenswrapper[4956]: E1211 22:00:51.864440 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8512a22-f9f2-4250-93b8-c125367cc1ad" containerName="extract" Dec 11 22:00:51 crc kubenswrapper[4956]: I1211 22:00:51.864447 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8512a22-f9f2-4250-93b8-c125367cc1ad" containerName="extract" Dec 11 22:00:51 crc kubenswrapper[4956]: I1211 22:00:51.864529 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8512a22-f9f2-4250-93b8-c125367cc1ad" containerName="extract" Dec 11 
22:00:51 crc kubenswrapper[4956]: I1211 22:00:51.864886 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-59698c9c66-vrvkn" Dec 11 22:00:51 crc kubenswrapper[4956]: I1211 22:00:51.902473 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 11 22:00:51 crc kubenswrapper[4956]: I1211 22:00:51.902640 4956 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 11 22:00:51 crc kubenswrapper[4956]: I1211 22:00:51.902749 4956 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 11 22:00:51 crc kubenswrapper[4956]: I1211 22:00:51.902815 4956 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-wx9z9" Dec 11 22:00:51 crc kubenswrapper[4956]: I1211 22:00:51.909372 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 11 22:00:51 crc kubenswrapper[4956]: I1211 22:00:51.911679 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/92356f98-4ebe-4b75-8703-1518fc3ca16a-webhook-cert\") pod \"metallb-operator-controller-manager-59698c9c66-vrvkn\" (UID: \"92356f98-4ebe-4b75-8703-1518fc3ca16a\") " pod="metallb-system/metallb-operator-controller-manager-59698c9c66-vrvkn" Dec 11 22:00:51 crc kubenswrapper[4956]: I1211 22:00:51.911742 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/92356f98-4ebe-4b75-8703-1518fc3ca16a-apiservice-cert\") pod \"metallb-operator-controller-manager-59698c9c66-vrvkn\" (UID: \"92356f98-4ebe-4b75-8703-1518fc3ca16a\") " pod="metallb-system/metallb-operator-controller-manager-59698c9c66-vrvkn" Dec 11 22:00:51 crc kubenswrapper[4956]: I1211 22:00:51.911805 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2blm\" (UniqueName: \"kubernetes.io/projected/92356f98-4ebe-4b75-8703-1518fc3ca16a-kube-api-access-s2blm\") pod \"metallb-operator-controller-manager-59698c9c66-vrvkn\" (UID: \"92356f98-4ebe-4b75-8703-1518fc3ca16a\") " pod="metallb-system/metallb-operator-controller-manager-59698c9c66-vrvkn" Dec 11 22:00:51 crc kubenswrapper[4956]: I1211 22:00:51.918084 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-59698c9c66-vrvkn"] Dec 11 22:00:52 crc kubenswrapper[4956]: I1211 22:00:52.012393 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/92356f98-4ebe-4b75-8703-1518fc3ca16a-apiservice-cert\") pod \"metallb-operator-controller-manager-59698c9c66-vrvkn\" (UID: \"92356f98-4ebe-4b75-8703-1518fc3ca16a\") " pod="metallb-system/metallb-operator-controller-manager-59698c9c66-vrvkn" Dec 11 22:00:52 crc kubenswrapper[4956]: I1211 22:00:52.012439 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2blm\" (UniqueName: \"kubernetes.io/projected/92356f98-4ebe-4b75-8703-1518fc3ca16a-kube-api-access-s2blm\") pod \"metallb-operator-controller-manager-59698c9c66-vrvkn\" (UID: \"92356f98-4ebe-4b75-8703-1518fc3ca16a\") " 
pod="metallb-system/metallb-operator-controller-manager-59698c9c66-vrvkn" Dec 11 22:00:52 crc kubenswrapper[4956]: I1211 22:00:52.012488 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/92356f98-4ebe-4b75-8703-1518fc3ca16a-webhook-cert\") pod \"metallb-operator-controller-manager-59698c9c66-vrvkn\" (UID: \"92356f98-4ebe-4b75-8703-1518fc3ca16a\") " pod="metallb-system/metallb-operator-controller-manager-59698c9c66-vrvkn" Dec 11 22:00:52 crc kubenswrapper[4956]: I1211 22:00:52.017601 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/92356f98-4ebe-4b75-8703-1518fc3ca16a-webhook-cert\") pod \"metallb-operator-controller-manager-59698c9c66-vrvkn\" (UID: \"92356f98-4ebe-4b75-8703-1518fc3ca16a\") " pod="metallb-system/metallb-operator-controller-manager-59698c9c66-vrvkn" Dec 11 22:00:52 crc kubenswrapper[4956]: I1211 22:00:52.018875 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/92356f98-4ebe-4b75-8703-1518fc3ca16a-apiservice-cert\") pod \"metallb-operator-controller-manager-59698c9c66-vrvkn\" (UID: \"92356f98-4ebe-4b75-8703-1518fc3ca16a\") " pod="metallb-system/metallb-operator-controller-manager-59698c9c66-vrvkn" Dec 11 22:00:52 crc kubenswrapper[4956]: I1211 22:00:52.030310 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2blm\" (UniqueName: \"kubernetes.io/projected/92356f98-4ebe-4b75-8703-1518fc3ca16a-kube-api-access-s2blm\") pod \"metallb-operator-controller-manager-59698c9c66-vrvkn\" (UID: \"92356f98-4ebe-4b75-8703-1518fc3ca16a\") " pod="metallb-system/metallb-operator-controller-manager-59698c9c66-vrvkn" Dec 11 22:00:52 crc kubenswrapper[4956]: I1211 22:00:52.208245 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-59698c9c66-vrvkn" Dec 11 22:00:52 crc kubenswrapper[4956]: I1211 22:00:52.262555 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-767fcd8485-zvd7d"] Dec 11 22:00:52 crc kubenswrapper[4956]: I1211 22:00:52.263216 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-767fcd8485-zvd7d" Dec 11 22:00:52 crc kubenswrapper[4956]: I1211 22:00:52.268248 4956 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 11 22:00:52 crc kubenswrapper[4956]: I1211 22:00:52.268482 4956 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 11 22:00:52 crc kubenswrapper[4956]: I1211 22:00:52.268603 4956 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-9rqpz" Dec 11 22:00:52 crc kubenswrapper[4956]: I1211 22:00:52.291643 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-767fcd8485-zvd7d"] Dec 11 22:00:52 crc kubenswrapper[4956]: I1211 22:00:52.569672 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-st7zp\" (UniqueName: \"kubernetes.io/projected/5d3dcb2a-f97d-419e-8121-c9c049e3bbad-kube-api-access-st7zp\") pod \"metallb-operator-webhook-server-767fcd8485-zvd7d\" (UID: \"5d3dcb2a-f97d-419e-8121-c9c049e3bbad\") " pod="metallb-system/metallb-operator-webhook-server-767fcd8485-zvd7d" Dec 11 22:00:52 crc kubenswrapper[4956]: I1211 22:00:52.569941 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5d3dcb2a-f97d-419e-8121-c9c049e3bbad-apiservice-cert\") pod \"metallb-operator-webhook-server-767fcd8485-zvd7d\" (UID: \"5d3dcb2a-f97d-419e-8121-c9c049e3bbad\") " pod="metallb-system/metallb-operator-webhook-server-767fcd8485-zvd7d" Dec 11 22:00:52 crc kubenswrapper[4956]: I1211 22:00:52.570012 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5d3dcb2a-f97d-419e-8121-c9c049e3bbad-webhook-cert\") pod \"metallb-operator-webhook-server-767fcd8485-zvd7d\" (UID: \"5d3dcb2a-f97d-419e-8121-c9c049e3bbad\") " pod="metallb-system/metallb-operator-webhook-server-767fcd8485-zvd7d" Dec 11 22:00:52 crc kubenswrapper[4956]: I1211 22:00:52.671536 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5d3dcb2a-f97d-419e-8121-c9c049e3bbad-apiservice-cert\") pod \"metallb-operator-webhook-server-767fcd8485-zvd7d\" (UID: \"5d3dcb2a-f97d-419e-8121-c9c049e3bbad\") " pod="metallb-system/metallb-operator-webhook-server-767fcd8485-zvd7d" Dec 11 22:00:52 crc kubenswrapper[4956]: I1211 22:00:52.671593 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5d3dcb2a-f97d-419e-8121-c9c049e3bbad-webhook-cert\") pod \"metallb-operator-webhook-server-767fcd8485-zvd7d\" (UID: \"5d3dcb2a-f97d-419e-8121-c9c049e3bbad\") " pod="metallb-system/metallb-operator-webhook-server-767fcd8485-zvd7d" Dec 11 22:00:52 crc kubenswrapper[4956]: I1211 22:00:52.671646 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-st7zp\" (UniqueName: \"kubernetes.io/projected/5d3dcb2a-f97d-419e-8121-c9c049e3bbad-kube-api-access-st7zp\") pod \"metallb-operator-webhook-server-767fcd8485-zvd7d\" (UID: \"5d3dcb2a-f97d-419e-8121-c9c049e3bbad\") " pod="metallb-system/metallb-operator-webhook-server-767fcd8485-zvd7d" Dec 11 22:00:52 crc kubenswrapper[4956]: I1211 
22:00:52.678539 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5d3dcb2a-f97d-419e-8121-c9c049e3bbad-apiservice-cert\") pod \"metallb-operator-webhook-server-767fcd8485-zvd7d\" (UID: \"5d3dcb2a-f97d-419e-8121-c9c049e3bbad\") " pod="metallb-system/metallb-operator-webhook-server-767fcd8485-zvd7d" Dec 11 22:00:52 crc kubenswrapper[4956]: I1211 22:00:52.690148 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5d3dcb2a-f97d-419e-8121-c9c049e3bbad-webhook-cert\") pod \"metallb-operator-webhook-server-767fcd8485-zvd7d\" (UID: \"5d3dcb2a-f97d-419e-8121-c9c049e3bbad\") " pod="metallb-system/metallb-operator-webhook-server-767fcd8485-zvd7d" Dec 11 22:00:52 crc kubenswrapper[4956]: I1211 22:00:52.691345 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-st7zp\" (UniqueName: \"kubernetes.io/projected/5d3dcb2a-f97d-419e-8121-c9c049e3bbad-kube-api-access-st7zp\") pod \"metallb-operator-webhook-server-767fcd8485-zvd7d\" (UID: \"5d3dcb2a-f97d-419e-8121-c9c049e3bbad\") " pod="metallb-system/metallb-operator-webhook-server-767fcd8485-zvd7d" Dec 11 22:00:52 crc kubenswrapper[4956]: I1211 22:00:52.881981 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-767fcd8485-zvd7d" Dec 11 22:00:52 crc kubenswrapper[4956]: I1211 22:00:52.994441 4956 generic.go:334] "Generic (PLEG): container finished" podID="a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe" containerID="b5aca53f0759ed5e19f0915b313d8f8f6c5934d9fae76673c4b3355fb5f39f32" exitCode=0 Dec 11 22:00:52 crc kubenswrapper[4956]: I1211 22:00:52.994498 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z6cd6" event={"ID":"a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe","Type":"ContainerDied","Data":"b5aca53f0759ed5e19f0915b313d8f8f6c5934d9fae76673c4b3355fb5f39f32"} Dec 11 22:00:53 crc kubenswrapper[4956]: I1211 22:00:53.178258 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-59698c9c66-vrvkn"] Dec 11 22:00:53 crc kubenswrapper[4956]: I1211 22:00:53.442387 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-767fcd8485-zvd7d"] Dec 11 22:00:53 crc kubenswrapper[4956]: I1211 22:00:53.449187 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z6cd6" Dec 11 22:00:53 crc kubenswrapper[4956]: W1211 22:00:53.456926 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d3dcb2a_f97d_419e_8121_c9c049e3bbad.slice/crio-7646dea7b9b4bd849131bec1a2279b132abd4cce8eadba738bf8324de7722c2f WatchSource:0}: Error finding container 7646dea7b9b4bd849131bec1a2279b132abd4cce8eadba738bf8324de7722c2f: Status 404 returned error can't find the container with id 7646dea7b9b4bd849131bec1a2279b132abd4cce8eadba738bf8324de7722c2f Dec 11 22:00:53 crc kubenswrapper[4956]: I1211 22:00:53.587702 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe-utilities\") pod \"a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe\" (UID: \"a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe\") " Dec 11 22:00:53 crc kubenswrapper[4956]: I1211 22:00:53.587755 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v4v4d\" (UniqueName: \"kubernetes.io/projected/a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe-kube-api-access-v4v4d\") pod \"a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe\" (UID: \"a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe\") " Dec 11 22:00:53 crc kubenswrapper[4956]: I1211 22:00:53.587835 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe-catalog-content\") pod \"a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe\" (UID: \"a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe\") " Dec 11 22:00:53 crc kubenswrapper[4956]: I1211 22:00:53.589071 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe-utilities" (OuterVolumeSpecName: "utilities") pod "a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe" (UID: "a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:00:53 crc kubenswrapper[4956]: I1211 22:00:53.591929 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe-kube-api-access-v4v4d" (OuterVolumeSpecName: "kube-api-access-v4v4d") pod "a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe" (UID: "a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe"). InnerVolumeSpecName "kube-api-access-v4v4d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:00:53 crc kubenswrapper[4956]: I1211 22:00:53.689636 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:53 crc kubenswrapper[4956]: I1211 22:00:53.689862 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v4v4d\" (UniqueName: \"kubernetes.io/projected/a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe-kube-api-access-v4v4d\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:53 crc kubenswrapper[4956]: I1211 22:00:53.701505 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe" (UID: "a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:00:53 crc kubenswrapper[4956]: I1211 22:00:53.791872 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 22:00:54 crc kubenswrapper[4956]: I1211 22:00:54.001550 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z6cd6" event={"ID":"a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe","Type":"ContainerDied","Data":"42469eb4d53ecd952138b4445530d5dfb0ca98e75c64720d0192e73c08e87de4"} Dec 11 22:00:54 crc kubenswrapper[4956]: I1211 22:00:54.001584 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z6cd6" Dec 11 22:00:54 crc kubenswrapper[4956]: I1211 22:00:54.001625 4956 scope.go:117] "RemoveContainer" containerID="b5aca53f0759ed5e19f0915b313d8f8f6c5934d9fae76673c4b3355fb5f39f32" Dec 11 22:00:54 crc kubenswrapper[4956]: I1211 22:00:54.003047 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-59698c9c66-vrvkn" event={"ID":"92356f98-4ebe-4b75-8703-1518fc3ca16a","Type":"ContainerStarted","Data":"c611c0acf547c33686b89272899238e0bfe86eb945e8dffd212ee0bdde35981c"} Dec 11 22:00:54 crc kubenswrapper[4956]: I1211 22:00:54.005278 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-767fcd8485-zvd7d" event={"ID":"5d3dcb2a-f97d-419e-8121-c9c049e3bbad","Type":"ContainerStarted","Data":"7646dea7b9b4bd849131bec1a2279b132abd4cce8eadba738bf8324de7722c2f"} Dec 11 22:00:54 crc kubenswrapper[4956]: I1211 22:00:54.026213 4956 scope.go:117] "RemoveContainer" containerID="685477a1fe20e29961c093f3881f33ec1f060d259d0584438ab9e013c0f7671b" Dec 11 22:00:54 crc kubenswrapper[4956]: I1211 22:00:54.046294 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z6cd6"] Dec 11 22:00:54 crc kubenswrapper[4956]: I1211 22:00:54.049073 4956 scope.go:117] "RemoveContainer" containerID="d821dce1c8009b15222281559af44d0014653ab262beddd639e1d3477b93f883" Dec 11 22:00:54 crc kubenswrapper[4956]: I1211 22:00:54.049579 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-z6cd6"] Dec 11 22:00:56 crc kubenswrapper[4956]: I1211 22:00:56.027526 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe" path="/var/lib/kubelet/pods/a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe/volumes" Dec 11 22:00:59 crc kubenswrapper[4956]: I1211 22:00:59.061880 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-59698c9c66-vrvkn" event={"ID":"92356f98-4ebe-4b75-8703-1518fc3ca16a","Type":"ContainerStarted","Data":"d2596daa19050b1f406106f7ee95f81e31c60a97698e3b4a81b7fb11567dbde9"} Dec 11 22:00:59 crc kubenswrapper[4956]: I1211 22:00:59.062407 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-59698c9c66-vrvkn" Dec 11 22:00:59 crc kubenswrapper[4956]: I1211 22:00:59.063524 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-767fcd8485-zvd7d" event={"ID":"5d3dcb2a-f97d-419e-8121-c9c049e3bbad","Type":"ContainerStarted","Data":"533ee89f2954ffbd3153484cf74cc643ec8bf5cffacbf1c7c772b5255d41439d"} Dec 11 
22:00:59 crc kubenswrapper[4956]: I1211 22:00:59.063676 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-767fcd8485-zvd7d" Dec 11 22:00:59 crc kubenswrapper[4956]: I1211 22:00:59.089744 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-59698c9c66-vrvkn" podStartSLOduration=3.25839186 podStartE2EDuration="8.089726048s" podCreationTimestamp="2025-12-11 22:00:51 +0000 UTC" firstStartedPulling="2025-12-11 22:00:53.196491597 +0000 UTC m=+745.640869747" lastFinishedPulling="2025-12-11 22:00:58.027825785 +0000 UTC m=+750.472203935" observedRunningTime="2025-12-11 22:00:59.088163665 +0000 UTC m=+751.532541835" watchObservedRunningTime="2025-12-11 22:00:59.089726048 +0000 UTC m=+751.534104198" Dec 11 22:00:59 crc kubenswrapper[4956]: I1211 22:00:59.116913 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-767fcd8485-zvd7d" podStartSLOduration=2.534038055 podStartE2EDuration="7.116895767s" podCreationTimestamp="2025-12-11 22:00:52 +0000 UTC" firstStartedPulling="2025-12-11 22:00:53.459357339 +0000 UTC m=+745.903735489" lastFinishedPulling="2025-12-11 22:00:58.042215051 +0000 UTC m=+750.486593201" observedRunningTime="2025-12-11 22:00:59.113795692 +0000 UTC m=+751.558173842" watchObservedRunningTime="2025-12-11 22:00:59.116895767 +0000 UTC m=+751.561273917" Dec 11 22:01:12 crc kubenswrapper[4956]: I1211 22:01:12.888194 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-767fcd8485-zvd7d" Dec 11 22:01:32 crc kubenswrapper[4956]: I1211 22:01:32.210603 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-59698c9c66-vrvkn" Dec 11 22:01:32 crc kubenswrapper[4956]: I1211 22:01:32.968516 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-6xxgk"] Dec 11 22:01:32 crc kubenswrapper[4956]: E1211 22:01:32.969362 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe" containerName="registry-server" Dec 11 22:01:32 crc kubenswrapper[4956]: I1211 22:01:32.969414 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe" containerName="registry-server" Dec 11 22:01:32 crc kubenswrapper[4956]: E1211 22:01:32.969426 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe" containerName="extract-utilities" Dec 11 22:01:32 crc kubenswrapper[4956]: I1211 22:01:32.969436 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe" containerName="extract-utilities" Dec 11 22:01:32 crc kubenswrapper[4956]: E1211 22:01:32.969464 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe" containerName="extract-content" Dec 11 22:01:32 crc kubenswrapper[4956]: I1211 22:01:32.969472 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe" containerName="extract-content" Dec 11 22:01:32 crc kubenswrapper[4956]: I1211 22:01:32.976094 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="a003c861-9ed1-46ac-8fd0-2cd0ec7e2afe" containerName="registry-server" Dec 11 22:01:32 crc kubenswrapper[4956]: I1211 22:01:32.976761 4956 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-98gk5"] Dec 11 22:01:32 crc kubenswrapper[4956]: I1211 22:01:32.976872 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-6xxgk" Dec 11 22:01:32 crc kubenswrapper[4956]: I1211 22:01:32.978642 4956 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-85vqw" Dec 11 22:01:32 crc kubenswrapper[4956]: I1211 22:01:32.979826 4956 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 11 22:01:32 crc kubenswrapper[4956]: I1211 22:01:32.982480 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:32 crc kubenswrapper[4956]: I1211 22:01:32.982556 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-6xxgk"] Dec 11 22:01:32 crc kubenswrapper[4956]: I1211 22:01:32.984182 4956 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 11 22:01:32 crc kubenswrapper[4956]: I1211 22:01:32.984426 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.047827 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-n474l"] Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.048672 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-n474l" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.051011 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.051251 4956 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-zhbqv" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.051311 4956 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.051423 4956 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.078098 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5bddd4b946-228xh"] Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.078955 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5bddd4b946-228xh" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.080797 4956 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.084229 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5bddd4b946-228xh"] Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.131598 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/d6ca2e06-8bbd-43dc-8945-3004713f92cb-reloader\") pod \"frr-k8s-98gk5\" (UID: \"d6ca2e06-8bbd-43dc-8945-3004713f92cb\") " pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.131645 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8wh6\" (UniqueName: \"kubernetes.io/projected/cf42ac15-e428-4c85-a2fc-25819760ec60-kube-api-access-b8wh6\") pod \"frr-k8s-webhook-server-7784b6fcf-6xxgk\" (UID: \"cf42ac15-e428-4c85-a2fc-25819760ec60\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-6xxgk" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.131668 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/d6ca2e06-8bbd-43dc-8945-3004713f92cb-frr-startup\") pod \"frr-k8s-98gk5\" (UID: \"d6ca2e06-8bbd-43dc-8945-3004713f92cb\") " pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.131695 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/d6ca2e06-8bbd-43dc-8945-3004713f92cb-frr-conf\") pod \"frr-k8s-98gk5\" (UID: \"d6ca2e06-8bbd-43dc-8945-3004713f92cb\") " pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.131733 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/d6ca2e06-8bbd-43dc-8945-3004713f92cb-frr-sockets\") pod \"frr-k8s-98gk5\" (UID: \"d6ca2e06-8bbd-43dc-8945-3004713f92cb\") " pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.131752 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wslqn\" (UniqueName: \"kubernetes.io/projected/d6ca2e06-8bbd-43dc-8945-3004713f92cb-kube-api-access-wslqn\") pod \"frr-k8s-98gk5\" (UID: \"d6ca2e06-8bbd-43dc-8945-3004713f92cb\") " pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.132171 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cf42ac15-e428-4c85-a2fc-25819760ec60-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-6xxgk\" (UID: \"cf42ac15-e428-4c85-a2fc-25819760ec60\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-6xxgk" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.132194 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/d6ca2e06-8bbd-43dc-8945-3004713f92cb-metrics\") pod \"frr-k8s-98gk5\" (UID: \"d6ca2e06-8bbd-43dc-8945-3004713f92cb\") " pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:33 crc 
kubenswrapper[4956]: I1211 22:01:33.132207 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d6ca2e06-8bbd-43dc-8945-3004713f92cb-metrics-certs\") pod \"frr-k8s-98gk5\" (UID: \"d6ca2e06-8bbd-43dc-8945-3004713f92cb\") " pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.232992 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cf42ac15-e428-4c85-a2fc-25819760ec60-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-6xxgk\" (UID: \"cf42ac15-e428-4c85-a2fc-25819760ec60\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-6xxgk" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.233042 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/d6ca2e06-8bbd-43dc-8945-3004713f92cb-metrics\") pod \"frr-k8s-98gk5\" (UID: \"d6ca2e06-8bbd-43dc-8945-3004713f92cb\") " pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.233063 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d6ca2e06-8bbd-43dc-8945-3004713f92cb-metrics-certs\") pod \"frr-k8s-98gk5\" (UID: \"d6ca2e06-8bbd-43dc-8945-3004713f92cb\") " pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.233091 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8t95\" (UniqueName: \"kubernetes.io/projected/31d06e79-18e3-4d0b-a871-365f9f2ee701-kube-api-access-t8t95\") pod \"speaker-n474l\" (UID: \"31d06e79-18e3-4d0b-a871-365f9f2ee701\") " pod="metallb-system/speaker-n474l" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.233116 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/d6ca2e06-8bbd-43dc-8945-3004713f92cb-reloader\") pod \"frr-k8s-98gk5\" (UID: \"d6ca2e06-8bbd-43dc-8945-3004713f92cb\") " pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.233132 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8wh6\" (UniqueName: \"kubernetes.io/projected/cf42ac15-e428-4c85-a2fc-25819760ec60-kube-api-access-b8wh6\") pod \"frr-k8s-webhook-server-7784b6fcf-6xxgk\" (UID: \"cf42ac15-e428-4c85-a2fc-25819760ec60\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-6xxgk" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.233150 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/31d06e79-18e3-4d0b-a871-365f9f2ee701-metallb-excludel2\") pod \"speaker-n474l\" (UID: \"31d06e79-18e3-4d0b-a871-365f9f2ee701\") " pod="metallb-system/speaker-n474l" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.233167 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/d6ca2e06-8bbd-43dc-8945-3004713f92cb-frr-startup\") pod \"frr-k8s-98gk5\" (UID: \"d6ca2e06-8bbd-43dc-8945-3004713f92cb\") " pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.233191 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"memberlist\" (UniqueName: \"kubernetes.io/secret/31d06e79-18e3-4d0b-a871-365f9f2ee701-memberlist\") pod \"speaker-n474l\" (UID: \"31d06e79-18e3-4d0b-a871-365f9f2ee701\") " pod="metallb-system/speaker-n474l" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.233209 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/d6ca2e06-8bbd-43dc-8945-3004713f92cb-frr-conf\") pod \"frr-k8s-98gk5\" (UID: \"d6ca2e06-8bbd-43dc-8945-3004713f92cb\") " pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.233230 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8a6e6699-b773-4761-8438-23abc4eedb21-cert\") pod \"controller-5bddd4b946-228xh\" (UID: \"8a6e6699-b773-4761-8438-23abc4eedb21\") " pod="metallb-system/controller-5bddd4b946-228xh" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.233256 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpcxd\" (UniqueName: \"kubernetes.io/projected/8a6e6699-b773-4761-8438-23abc4eedb21-kube-api-access-rpcxd\") pod \"controller-5bddd4b946-228xh\" (UID: \"8a6e6699-b773-4761-8438-23abc4eedb21\") " pod="metallb-system/controller-5bddd4b946-228xh" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.233276 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/d6ca2e06-8bbd-43dc-8945-3004713f92cb-frr-sockets\") pod \"frr-k8s-98gk5\" (UID: \"d6ca2e06-8bbd-43dc-8945-3004713f92cb\") " pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.233290 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/31d06e79-18e3-4d0b-a871-365f9f2ee701-metrics-certs\") pod \"speaker-n474l\" (UID: \"31d06e79-18e3-4d0b-a871-365f9f2ee701\") " pod="metallb-system/speaker-n474l" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.233307 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wslqn\" (UniqueName: \"kubernetes.io/projected/d6ca2e06-8bbd-43dc-8945-3004713f92cb-kube-api-access-wslqn\") pod \"frr-k8s-98gk5\" (UID: \"d6ca2e06-8bbd-43dc-8945-3004713f92cb\") " pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.233327 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8a6e6699-b773-4761-8438-23abc4eedb21-metrics-certs\") pod \"controller-5bddd4b946-228xh\" (UID: \"8a6e6699-b773-4761-8438-23abc4eedb21\") " pod="metallb-system/controller-5bddd4b946-228xh" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.234162 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/d6ca2e06-8bbd-43dc-8945-3004713f92cb-metrics\") pod \"frr-k8s-98gk5\" (UID: \"d6ca2e06-8bbd-43dc-8945-3004713f92cb\") " pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.234516 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/d6ca2e06-8bbd-43dc-8945-3004713f92cb-frr-startup\") pod \"frr-k8s-98gk5\" (UID: \"d6ca2e06-8bbd-43dc-8945-3004713f92cb\") " 
pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.234736 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/d6ca2e06-8bbd-43dc-8945-3004713f92cb-reloader\") pod \"frr-k8s-98gk5\" (UID: \"d6ca2e06-8bbd-43dc-8945-3004713f92cb\") " pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.235226 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/d6ca2e06-8bbd-43dc-8945-3004713f92cb-frr-conf\") pod \"frr-k8s-98gk5\" (UID: \"d6ca2e06-8bbd-43dc-8945-3004713f92cb\") " pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.235476 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/d6ca2e06-8bbd-43dc-8945-3004713f92cb-frr-sockets\") pod \"frr-k8s-98gk5\" (UID: \"d6ca2e06-8bbd-43dc-8945-3004713f92cb\") " pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.241105 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cf42ac15-e428-4c85-a2fc-25819760ec60-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-6xxgk\" (UID: \"cf42ac15-e428-4c85-a2fc-25819760ec60\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-6xxgk" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.243326 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d6ca2e06-8bbd-43dc-8945-3004713f92cb-metrics-certs\") pod \"frr-k8s-98gk5\" (UID: \"d6ca2e06-8bbd-43dc-8945-3004713f92cb\") " pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.262896 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wslqn\" (UniqueName: \"kubernetes.io/projected/d6ca2e06-8bbd-43dc-8945-3004713f92cb-kube-api-access-wslqn\") pod \"frr-k8s-98gk5\" (UID: \"d6ca2e06-8bbd-43dc-8945-3004713f92cb\") " pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.300587 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8wh6\" (UniqueName: \"kubernetes.io/projected/cf42ac15-e428-4c85-a2fc-25819760ec60-kube-api-access-b8wh6\") pod \"frr-k8s-webhook-server-7784b6fcf-6xxgk\" (UID: \"cf42ac15-e428-4c85-a2fc-25819760ec60\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-6xxgk" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.315390 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-6xxgk" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.322312 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.336586 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/31d06e79-18e3-4d0b-a871-365f9f2ee701-memberlist\") pod \"speaker-n474l\" (UID: \"31d06e79-18e3-4d0b-a871-365f9f2ee701\") " pod="metallb-system/speaker-n474l" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.336644 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8a6e6699-b773-4761-8438-23abc4eedb21-cert\") pod \"controller-5bddd4b946-228xh\" (UID: \"8a6e6699-b773-4761-8438-23abc4eedb21\") " pod="metallb-system/controller-5bddd4b946-228xh" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.336685 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpcxd\" (UniqueName: \"kubernetes.io/projected/8a6e6699-b773-4761-8438-23abc4eedb21-kube-api-access-rpcxd\") pod \"controller-5bddd4b946-228xh\" (UID: \"8a6e6699-b773-4761-8438-23abc4eedb21\") " pod="metallb-system/controller-5bddd4b946-228xh" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.336711 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/31d06e79-18e3-4d0b-a871-365f9f2ee701-metrics-certs\") pod \"speaker-n474l\" (UID: \"31d06e79-18e3-4d0b-a871-365f9f2ee701\") " pod="metallb-system/speaker-n474l" Dec 11 22:01:33 crc kubenswrapper[4956]: E1211 22:01:33.336738 4956 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 11 22:01:33 crc kubenswrapper[4956]: E1211 22:01:33.336824 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/31d06e79-18e3-4d0b-a871-365f9f2ee701-memberlist podName:31d06e79-18e3-4d0b-a871-365f9f2ee701 nodeName:}" failed. No retries permitted until 2025-12-11 22:01:33.836803151 +0000 UTC m=+786.281181301 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/31d06e79-18e3-4d0b-a871-365f9f2ee701-memberlist") pod "speaker-n474l" (UID: "31d06e79-18e3-4d0b-a871-365f9f2ee701") : secret "metallb-memberlist" not found Dec 11 22:01:33 crc kubenswrapper[4956]: E1211 22:01:33.336828 4956 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Dec 11 22:01:33 crc kubenswrapper[4956]: E1211 22:01:33.336882 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8a6e6699-b773-4761-8438-23abc4eedb21-metrics-certs podName:8a6e6699-b773-4761-8438-23abc4eedb21 nodeName:}" failed. No retries permitted until 2025-12-11 22:01:33.836872283 +0000 UTC m=+786.281250443 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8a6e6699-b773-4761-8438-23abc4eedb21-metrics-certs") pod "controller-5bddd4b946-228xh" (UID: "8a6e6699-b773-4761-8438-23abc4eedb21") : secret "controller-certs-secret" not found Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.336742 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8a6e6699-b773-4761-8438-23abc4eedb21-metrics-certs\") pod \"controller-5bddd4b946-228xh\" (UID: \"8a6e6699-b773-4761-8438-23abc4eedb21\") " pod="metallb-system/controller-5bddd4b946-228xh" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.336954 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8t95\" (UniqueName: \"kubernetes.io/projected/31d06e79-18e3-4d0b-a871-365f9f2ee701-kube-api-access-t8t95\") pod \"speaker-n474l\" (UID: \"31d06e79-18e3-4d0b-a871-365f9f2ee701\") " pod="metallb-system/speaker-n474l" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.336996 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/31d06e79-18e3-4d0b-a871-365f9f2ee701-metallb-excludel2\") pod \"speaker-n474l\" (UID: \"31d06e79-18e3-4d0b-a871-365f9f2ee701\") " pod="metallb-system/speaker-n474l" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.337846 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/31d06e79-18e3-4d0b-a871-365f9f2ee701-metallb-excludel2\") pod \"speaker-n474l\" (UID: \"31d06e79-18e3-4d0b-a871-365f9f2ee701\") " pod="metallb-system/speaker-n474l" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.340877 4956 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.342239 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/31d06e79-18e3-4d0b-a871-365f9f2ee701-metrics-certs\") pod \"speaker-n474l\" (UID: \"31d06e79-18e3-4d0b-a871-365f9f2ee701\") " pod="metallb-system/speaker-n474l" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.354518 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8a6e6699-b773-4761-8438-23abc4eedb21-cert\") pod \"controller-5bddd4b946-228xh\" (UID: \"8a6e6699-b773-4761-8438-23abc4eedb21\") " pod="metallb-system/controller-5bddd4b946-228xh" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.357371 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpcxd\" (UniqueName: \"kubernetes.io/projected/8a6e6699-b773-4761-8438-23abc4eedb21-kube-api-access-rpcxd\") pod \"controller-5bddd4b946-228xh\" (UID: \"8a6e6699-b773-4761-8438-23abc4eedb21\") " pod="metallb-system/controller-5bddd4b946-228xh" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.359664 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8t95\" (UniqueName: \"kubernetes.io/projected/31d06e79-18e3-4d0b-a871-365f9f2ee701-kube-api-access-t8t95\") pod \"speaker-n474l\" (UID: \"31d06e79-18e3-4d0b-a871-365f9f2ee701\") " pod="metallb-system/speaker-n474l" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.521141 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-6xxgk"] Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.844605 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/31d06e79-18e3-4d0b-a871-365f9f2ee701-memberlist\") pod \"speaker-n474l\" (UID: \"31d06e79-18e3-4d0b-a871-365f9f2ee701\") " pod="metallb-system/speaker-n474l" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.845087 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8a6e6699-b773-4761-8438-23abc4eedb21-metrics-certs\") pod \"controller-5bddd4b946-228xh\" (UID: \"8a6e6699-b773-4761-8438-23abc4eedb21\") " pod="metallb-system/controller-5bddd4b946-228xh" Dec 11 22:01:33 crc kubenswrapper[4956]: E1211 22:01:33.844879 4956 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 11 22:01:33 crc kubenswrapper[4956]: E1211 22:01:33.845823 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/31d06e79-18e3-4d0b-a871-365f9f2ee701-memberlist podName:31d06e79-18e3-4d0b-a871-365f9f2ee701 nodeName:}" failed. No retries permitted until 2025-12-11 22:01:34.845223695 +0000 UTC m=+787.289601885 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/31d06e79-18e3-4d0b-a871-365f9f2ee701-memberlist") pod "speaker-n474l" (UID: "31d06e79-18e3-4d0b-a871-365f9f2ee701") : secret "metallb-memberlist" not found Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.853018 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8a6e6699-b773-4761-8438-23abc4eedb21-metrics-certs\") pod \"controller-5bddd4b946-228xh\" (UID: \"8a6e6699-b773-4761-8438-23abc4eedb21\") " pod="metallb-system/controller-5bddd4b946-228xh" Dec 11 22:01:33 crc kubenswrapper[4956]: I1211 22:01:33.995513 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5bddd4b946-228xh" Dec 11 22:01:34 crc kubenswrapper[4956]: I1211 22:01:34.238012 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5bddd4b946-228xh"] Dec 11 22:01:34 crc kubenswrapper[4956]: I1211 22:01:34.253926 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-6xxgk" event={"ID":"cf42ac15-e428-4c85-a2fc-25819760ec60","Type":"ContainerStarted","Data":"e3ce69ff7e0be7f9d257f152e0a5aebaa91bd08ba0528fd071b87e2019cd2e44"} Dec 11 22:01:34 crc kubenswrapper[4956]: I1211 22:01:34.255575 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-228xh" event={"ID":"8a6e6699-b773-4761-8438-23abc4eedb21","Type":"ContainerStarted","Data":"58f8ff25d67a4a7490d85673cd581fa0712821451ef66be0d3cebbd2f76e2731"} Dec 11 22:01:34 crc kubenswrapper[4956]: I1211 22:01:34.256585 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-98gk5" event={"ID":"d6ca2e06-8bbd-43dc-8945-3004713f92cb","Type":"ContainerStarted","Data":"d0d390d5d3389aa2091743110f1237c31509af29cb3ae078571991c1d878806b"} Dec 11 22:01:34 crc kubenswrapper[4956]: I1211 22:01:34.856079 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/31d06e79-18e3-4d0b-a871-365f9f2ee701-memberlist\") pod \"speaker-n474l\" (UID: \"31d06e79-18e3-4d0b-a871-365f9f2ee701\") " pod="metallb-system/speaker-n474l" Dec 11 22:01:34 crc kubenswrapper[4956]: I1211 22:01:34.864114 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/31d06e79-18e3-4d0b-a871-365f9f2ee701-memberlist\") pod \"speaker-n474l\" (UID: \"31d06e79-18e3-4d0b-a871-365f9f2ee701\") " pod="metallb-system/speaker-n474l" Dec 11 22:01:34 crc kubenswrapper[4956]: I1211 22:01:34.866018 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-n474l" Dec 11 22:01:34 crc kubenswrapper[4956]: W1211 22:01:34.889625 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod31d06e79_18e3_4d0b_a871_365f9f2ee701.slice/crio-a9d28c43959d4c4ceef9f9117954d905503335b1abcd71869f64631d895df2df WatchSource:0}: Error finding container a9d28c43959d4c4ceef9f9117954d905503335b1abcd71869f64631d895df2df: Status 404 returned error can't find the container with id a9d28c43959d4c4ceef9f9117954d905503335b1abcd71869f64631d895df2df Dec 11 22:01:35 crc kubenswrapper[4956]: I1211 22:01:35.266117 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-n474l" event={"ID":"31d06e79-18e3-4d0b-a871-365f9f2ee701","Type":"ContainerStarted","Data":"00683fe17e553e3c9f5dcc41dc51413dc167670471a8c5cc4f490437007c7036"} Dec 11 22:01:35 crc kubenswrapper[4956]: I1211 22:01:35.266478 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-n474l" event={"ID":"31d06e79-18e3-4d0b-a871-365f9f2ee701","Type":"ContainerStarted","Data":"a9d28c43959d4c4ceef9f9117954d905503335b1abcd71869f64631d895df2df"} Dec 11 22:01:35 crc kubenswrapper[4956]: I1211 22:01:35.268702 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-228xh" event={"ID":"8a6e6699-b773-4761-8438-23abc4eedb21","Type":"ContainerStarted","Data":"eb2d714b4219457d78652716f58b9ba83f8a9c8d649b85da3bdd6bc6f03c6a94"} Dec 11 22:01:44 crc kubenswrapper[4956]: I1211 22:01:44.340968 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-228xh" event={"ID":"8a6e6699-b773-4761-8438-23abc4eedb21","Type":"ContainerStarted","Data":"25f571f172e6972b340078a312cded81ef3b24ad7628ef77b40276e3e352eaec"} Dec 11 22:01:44 crc kubenswrapper[4956]: I1211 22:01:44.341667 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5bddd4b946-228xh" Dec 11 22:01:44 crc kubenswrapper[4956]: I1211 22:01:44.347055 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5bddd4b946-228xh" Dec 11 22:01:44 crc kubenswrapper[4956]: I1211 22:01:44.355380 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-n474l" event={"ID":"31d06e79-18e3-4d0b-a871-365f9f2ee701","Type":"ContainerStarted","Data":"46c248c7cefbe81a3e9b45f5c658d7bf311e5384a589ad33378b2c92d2ed7c3a"} Dec 11 22:01:44 crc kubenswrapper[4956]: I1211 22:01:44.355928 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-n474l" Dec 11 22:01:44 crc kubenswrapper[4956]: I1211 22:01:44.357468 4956 generic.go:334] "Generic (PLEG): container finished" podID="d6ca2e06-8bbd-43dc-8945-3004713f92cb" containerID="df1a7fc1394750adeb0a9229f16425c2660274dcbef9f895a7b6ac0151964ae9" exitCode=0 Dec 11 22:01:44 crc kubenswrapper[4956]: I1211 22:01:44.357554 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-98gk5" event={"ID":"d6ca2e06-8bbd-43dc-8945-3004713f92cb","Type":"ContainerDied","Data":"df1a7fc1394750adeb0a9229f16425c2660274dcbef9f895a7b6ac0151964ae9"} Dec 11 22:01:44 crc kubenswrapper[4956]: I1211 22:01:44.359216 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-6xxgk" 
event={"ID":"cf42ac15-e428-4c85-a2fc-25819760ec60","Type":"ContainerStarted","Data":"c52fb740bc319b4525d0ee2dea27600a437a923ca10741faf8a825e5bbb2e2ee"} Dec 11 22:01:44 crc kubenswrapper[4956]: I1211 22:01:44.359536 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-6xxgk" Dec 11 22:01:44 crc kubenswrapper[4956]: I1211 22:01:44.371294 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5bddd4b946-228xh" podStartSLOduration=2.7110928359999997 podStartE2EDuration="11.371252438s" podCreationTimestamp="2025-12-11 22:01:33 +0000 UTC" firstStartedPulling="2025-12-11 22:01:34.467903065 +0000 UTC m=+786.912281215" lastFinishedPulling="2025-12-11 22:01:43.128062677 +0000 UTC m=+795.572440817" observedRunningTime="2025-12-11 22:01:44.370700994 +0000 UTC m=+796.815079154" watchObservedRunningTime="2025-12-11 22:01:44.371252438 +0000 UTC m=+796.815630588" Dec 11 22:01:44 crc kubenswrapper[4956]: I1211 22:01:44.402180 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-n474l" podStartSLOduration=3.470415017 podStartE2EDuration="11.402160363s" podCreationTimestamp="2025-12-11 22:01:33 +0000 UTC" firstStartedPulling="2025-12-11 22:01:35.189314752 +0000 UTC m=+787.633692902" lastFinishedPulling="2025-12-11 22:01:43.121060098 +0000 UTC m=+795.565438248" observedRunningTime="2025-12-11 22:01:44.386352306 +0000 UTC m=+796.830730476" watchObservedRunningTime="2025-12-11 22:01:44.402160363 +0000 UTC m=+796.846538513" Dec 11 22:01:44 crc kubenswrapper[4956]: I1211 22:01:44.420389 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-6xxgk" podStartSLOduration=2.789296917 podStartE2EDuration="12.420369415s" podCreationTimestamp="2025-12-11 22:01:32 +0000 UTC" firstStartedPulling="2025-12-11 22:01:33.533954617 +0000 UTC m=+785.978332757" lastFinishedPulling="2025-12-11 22:01:43.165027105 +0000 UTC m=+795.609405255" observedRunningTime="2025-12-11 22:01:44.417120958 +0000 UTC m=+796.861499138" watchObservedRunningTime="2025-12-11 22:01:44.420369415 +0000 UTC m=+796.864747575" Dec 11 22:01:45 crc kubenswrapper[4956]: I1211 22:01:45.366015 4956 generic.go:334] "Generic (PLEG): container finished" podID="d6ca2e06-8bbd-43dc-8945-3004713f92cb" containerID="87e4a3250c91ad8afbb1531e79a8ba67d23ceeed4e59441ed6ad121383659f31" exitCode=0 Dec 11 22:01:45 crc kubenswrapper[4956]: I1211 22:01:45.366076 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-98gk5" event={"ID":"d6ca2e06-8bbd-43dc-8945-3004713f92cb","Type":"ContainerDied","Data":"87e4a3250c91ad8afbb1531e79a8ba67d23ceeed4e59441ed6ad121383659f31"} Dec 11 22:01:45 crc kubenswrapper[4956]: I1211 22:01:45.370007 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-n474l" Dec 11 22:01:46 crc kubenswrapper[4956]: I1211 22:01:46.374035 4956 generic.go:334] "Generic (PLEG): container finished" podID="d6ca2e06-8bbd-43dc-8945-3004713f92cb" containerID="280fd5b892eb0787efb69a0818c76ded0fae5706d5f8d78d1bf7f3df6f16e2e6" exitCode=0 Dec 11 22:01:46 crc kubenswrapper[4956]: I1211 22:01:46.374137 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-98gk5" event={"ID":"d6ca2e06-8bbd-43dc-8945-3004713f92cb","Type":"ContainerDied","Data":"280fd5b892eb0787efb69a0818c76ded0fae5706d5f8d78d1bf7f3df6f16e2e6"} Dec 11 22:01:46 crc 
kubenswrapper[4956]: I1211 22:01:46.889216 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 22:01:46 crc kubenswrapper[4956]: I1211 22:01:46.889567 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 22:01:47 crc kubenswrapper[4956]: I1211 22:01:47.395807 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-98gk5" event={"ID":"d6ca2e06-8bbd-43dc-8945-3004713f92cb","Type":"ContainerStarted","Data":"b7fcfb3e82b72fda568a35f2d5fe72555b41dcbbf117a5ad1d43a52ba8803c84"} Dec 11 22:01:47 crc kubenswrapper[4956]: I1211 22:01:47.395888 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-98gk5" event={"ID":"d6ca2e06-8bbd-43dc-8945-3004713f92cb","Type":"ContainerStarted","Data":"e2d6d3436aacdd8ee9567c89e642b0aaeb70c6135215ce1f21725f0c5450c16f"} Dec 11 22:01:47 crc kubenswrapper[4956]: I1211 22:01:47.395902 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-98gk5" event={"ID":"d6ca2e06-8bbd-43dc-8945-3004713f92cb","Type":"ContainerStarted","Data":"7c53713792a75880c988f69c4fae21d39c04c9c80257be996f9034398fcdfb6e"} Dec 11 22:01:47 crc kubenswrapper[4956]: I1211 22:01:47.395954 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-98gk5" event={"ID":"d6ca2e06-8bbd-43dc-8945-3004713f92cb","Type":"ContainerStarted","Data":"30fe46976ef24b371260f430d36fe691ea15c6c7f174721bba1100060b2f6a82"} Dec 11 22:01:47 crc kubenswrapper[4956]: I1211 22:01:47.395967 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-98gk5" event={"ID":"d6ca2e06-8bbd-43dc-8945-3004713f92cb","Type":"ContainerStarted","Data":"f534521ad832f771f5bf305d0d232c135b610cc4ba5d70d477718c1ce2229abe"} Dec 11 22:01:47 crc kubenswrapper[4956]: I1211 22:01:47.395979 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-98gk5" event={"ID":"d6ca2e06-8bbd-43dc-8945-3004713f92cb","Type":"ContainerStarted","Data":"c2cc675ccf89b85ae4a096693d942f06a3d90a0de1154f9105448c0f8d08b2b0"} Dec 11 22:01:47 crc kubenswrapper[4956]: I1211 22:01:47.396042 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:47 crc kubenswrapper[4956]: I1211 22:01:47.426811 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-98gk5" podStartSLOduration=5.776736114 podStartE2EDuration="15.426794135s" podCreationTimestamp="2025-12-11 22:01:32 +0000 UTC" firstStartedPulling="2025-12-11 22:01:33.552315323 +0000 UTC m=+785.996693473" lastFinishedPulling="2025-12-11 22:01:43.202373344 +0000 UTC m=+795.646751494" observedRunningTime="2025-12-11 22:01:47.424918345 +0000 UTC m=+799.869296495" watchObservedRunningTime="2025-12-11 22:01:47.426794135 +0000 UTC m=+799.871172295" Dec 11 22:01:48 crc kubenswrapper[4956]: I1211 22:01:48.323044 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:48 crc kubenswrapper[4956]: 
I1211 22:01:48.431518 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-98gk5" Dec 11 22:01:53 crc kubenswrapper[4956]: I1211 22:01:53.114627 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-6r6hf"] Dec 11 22:01:53 crc kubenswrapper[4956]: I1211 22:01:53.115961 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-6r6hf" Dec 11 22:01:53 crc kubenswrapper[4956]: I1211 22:01:53.118493 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-index-dockercfg-l6gqt" Dec 11 22:01:53 crc kubenswrapper[4956]: I1211 22:01:53.118518 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 11 22:01:53 crc kubenswrapper[4956]: I1211 22:01:53.118694 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 11 22:01:53 crc kubenswrapper[4956]: I1211 22:01:53.119914 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-6r6hf"] Dec 11 22:01:53 crc kubenswrapper[4956]: I1211 22:01:53.300244 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sztlp\" (UniqueName: \"kubernetes.io/projected/d0f6ee1d-38b7-461c-9283-8246e7f17676-kube-api-access-sztlp\") pod \"mariadb-operator-index-6r6hf\" (UID: \"d0f6ee1d-38b7-461c-9283-8246e7f17676\") " pod="openstack-operators/mariadb-operator-index-6r6hf" Dec 11 22:01:53 crc kubenswrapper[4956]: I1211 22:01:53.348513 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-6xxgk" Dec 11 22:01:53 crc kubenswrapper[4956]: I1211 22:01:53.401294 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sztlp\" (UniqueName: \"kubernetes.io/projected/d0f6ee1d-38b7-461c-9283-8246e7f17676-kube-api-access-sztlp\") pod \"mariadb-operator-index-6r6hf\" (UID: \"d0f6ee1d-38b7-461c-9283-8246e7f17676\") " pod="openstack-operators/mariadb-operator-index-6r6hf" Dec 11 22:01:53 crc kubenswrapper[4956]: I1211 22:01:53.428571 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sztlp\" (UniqueName: \"kubernetes.io/projected/d0f6ee1d-38b7-461c-9283-8246e7f17676-kube-api-access-sztlp\") pod \"mariadb-operator-index-6r6hf\" (UID: \"d0f6ee1d-38b7-461c-9283-8246e7f17676\") " pod="openstack-operators/mariadb-operator-index-6r6hf" Dec 11 22:01:53 crc kubenswrapper[4956]: I1211 22:01:53.433318 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-6r6hf" Dec 11 22:01:53 crc kubenswrapper[4956]: I1211 22:01:53.720162 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-6r6hf"] Dec 11 22:01:54 crc kubenswrapper[4956]: I1211 22:01:54.436240 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-6r6hf" event={"ID":"d0f6ee1d-38b7-461c-9283-8246e7f17676","Type":"ContainerStarted","Data":"a4c53c65af1900988ec4255573e60dc44891c856942d265e4e1c45ecbd48bf8e"} Dec 11 22:01:56 crc kubenswrapper[4956]: I1211 22:01:56.126028 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-6r6hf"] Dec 11 22:01:56 crc kubenswrapper[4956]: I1211 22:01:56.454446 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-6r6hf" event={"ID":"d0f6ee1d-38b7-461c-9283-8246e7f17676","Type":"ContainerStarted","Data":"3c0c2d096b9e197dc6c318ec75f75e2a26dcc371c9b5925aa8487e997074841b"} Dec 11 22:01:56 crc kubenswrapper[4956]: I1211 22:01:56.725845 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-6r6hf" podStartSLOduration=3.036943534 podStartE2EDuration="4.725826754s" podCreationTimestamp="2025-12-11 22:01:52 +0000 UTC" firstStartedPulling="2025-12-11 22:01:53.736755853 +0000 UTC m=+806.181134003" lastFinishedPulling="2025-12-11 22:01:55.425639073 +0000 UTC m=+807.870017223" observedRunningTime="2025-12-11 22:01:56.475175554 +0000 UTC m=+808.919553784" watchObservedRunningTime="2025-12-11 22:01:56.725826754 +0000 UTC m=+809.170204904" Dec 11 22:01:56 crc kubenswrapper[4956]: I1211 22:01:56.729001 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-2gr5s"] Dec 11 22:01:56 crc kubenswrapper[4956]: I1211 22:01:56.729756 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-2gr5s" Dec 11 22:01:56 crc kubenswrapper[4956]: I1211 22:01:56.739817 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-2gr5s"] Dec 11 22:01:56 crc kubenswrapper[4956]: I1211 22:01:56.842434 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52nmt\" (UniqueName: \"kubernetes.io/projected/e625ed2d-e5fa-44e0-9388-9a3df15fe132-kube-api-access-52nmt\") pod \"mariadb-operator-index-2gr5s\" (UID: \"e625ed2d-e5fa-44e0-9388-9a3df15fe132\") " pod="openstack-operators/mariadb-operator-index-2gr5s" Dec 11 22:01:56 crc kubenswrapper[4956]: I1211 22:01:56.944875 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52nmt\" (UniqueName: \"kubernetes.io/projected/e625ed2d-e5fa-44e0-9388-9a3df15fe132-kube-api-access-52nmt\") pod \"mariadb-operator-index-2gr5s\" (UID: \"e625ed2d-e5fa-44e0-9388-9a3df15fe132\") " pod="openstack-operators/mariadb-operator-index-2gr5s" Dec 11 22:01:56 crc kubenswrapper[4956]: I1211 22:01:56.964123 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52nmt\" (UniqueName: \"kubernetes.io/projected/e625ed2d-e5fa-44e0-9388-9a3df15fe132-kube-api-access-52nmt\") pod \"mariadb-operator-index-2gr5s\" (UID: \"e625ed2d-e5fa-44e0-9388-9a3df15fe132\") " pod="openstack-operators/mariadb-operator-index-2gr5s" Dec 11 22:01:57 crc kubenswrapper[4956]: I1211 22:01:57.046539 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-2gr5s" Dec 11 22:01:57 crc kubenswrapper[4956]: I1211 22:01:57.256646 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-2gr5s"] Dec 11 22:01:57 crc kubenswrapper[4956]: W1211 22:01:57.261723 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode625ed2d_e5fa_44e0_9388_9a3df15fe132.slice/crio-cf18cd17a6810b4dc182674a5a6e97722ce84b57f8c19858cf65dc03a8ef40a3 WatchSource:0}: Error finding container cf18cd17a6810b4dc182674a5a6e97722ce84b57f8c19858cf65dc03a8ef40a3: Status 404 returned error can't find the container with id cf18cd17a6810b4dc182674a5a6e97722ce84b57f8c19858cf65dc03a8ef40a3 Dec 11 22:01:57 crc kubenswrapper[4956]: I1211 22:01:57.461955 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-2gr5s" event={"ID":"e625ed2d-e5fa-44e0-9388-9a3df15fe132","Type":"ContainerStarted","Data":"cf18cd17a6810b4dc182674a5a6e97722ce84b57f8c19858cf65dc03a8ef40a3"} Dec 11 22:01:57 crc kubenswrapper[4956]: I1211 22:01:57.462073 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-index-6r6hf" podUID="d0f6ee1d-38b7-461c-9283-8246e7f17676" containerName="registry-server" containerID="cri-o://3c0c2d096b9e197dc6c318ec75f75e2a26dcc371c9b5925aa8487e997074841b" gracePeriod=2 Dec 11 22:01:57 crc kubenswrapper[4956]: I1211 22:01:57.797287 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-6r6hf" Dec 11 22:01:57 crc kubenswrapper[4956]: I1211 22:01:57.861720 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sztlp\" (UniqueName: \"kubernetes.io/projected/d0f6ee1d-38b7-461c-9283-8246e7f17676-kube-api-access-sztlp\") pod \"d0f6ee1d-38b7-461c-9283-8246e7f17676\" (UID: \"d0f6ee1d-38b7-461c-9283-8246e7f17676\") " Dec 11 22:01:57 crc kubenswrapper[4956]: I1211 22:01:57.866634 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0f6ee1d-38b7-461c-9283-8246e7f17676-kube-api-access-sztlp" (OuterVolumeSpecName: "kube-api-access-sztlp") pod "d0f6ee1d-38b7-461c-9283-8246e7f17676" (UID: "d0f6ee1d-38b7-461c-9283-8246e7f17676"). InnerVolumeSpecName "kube-api-access-sztlp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:01:57 crc kubenswrapper[4956]: I1211 22:01:57.962355 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sztlp\" (UniqueName: \"kubernetes.io/projected/d0f6ee1d-38b7-461c-9283-8246e7f17676-kube-api-access-sztlp\") on node \"crc\" DevicePath \"\"" Dec 11 22:01:58 crc kubenswrapper[4956]: I1211 22:01:58.477120 4956 generic.go:334] "Generic (PLEG): container finished" podID="d0f6ee1d-38b7-461c-9283-8246e7f17676" containerID="3c0c2d096b9e197dc6c318ec75f75e2a26dcc371c9b5925aa8487e997074841b" exitCode=0 Dec 11 22:01:58 crc kubenswrapper[4956]: I1211 22:01:58.477303 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-6r6hf" event={"ID":"d0f6ee1d-38b7-461c-9283-8246e7f17676","Type":"ContainerDied","Data":"3c0c2d096b9e197dc6c318ec75f75e2a26dcc371c9b5925aa8487e997074841b"} Dec 11 22:01:58 crc kubenswrapper[4956]: I1211 22:01:58.477347 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-6r6hf" event={"ID":"d0f6ee1d-38b7-461c-9283-8246e7f17676","Type":"ContainerDied","Data":"a4c53c65af1900988ec4255573e60dc44891c856942d265e4e1c45ecbd48bf8e"} Dec 11 22:01:58 crc kubenswrapper[4956]: I1211 22:01:58.477370 4956 scope.go:117] "RemoveContainer" containerID="3c0c2d096b9e197dc6c318ec75f75e2a26dcc371c9b5925aa8487e997074841b" Dec 11 22:01:58 crc kubenswrapper[4956]: I1211 22:01:58.477542 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-6r6hf" Dec 11 22:01:58 crc kubenswrapper[4956]: I1211 22:01:58.481320 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-2gr5s" event={"ID":"e625ed2d-e5fa-44e0-9388-9a3df15fe132","Type":"ContainerStarted","Data":"fd5aa8f657ed68c0c3a3ac28e8b4317b06ce2be1f79ad4b126e67cf61294032f"} Dec 11 22:01:58 crc kubenswrapper[4956]: I1211 22:01:58.494684 4956 scope.go:117] "RemoveContainer" containerID="3c0c2d096b9e197dc6c318ec75f75e2a26dcc371c9b5925aa8487e997074841b" Dec 11 22:01:58 crc kubenswrapper[4956]: E1211 22:01:58.495292 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c0c2d096b9e197dc6c318ec75f75e2a26dcc371c9b5925aa8487e997074841b\": container with ID starting with 3c0c2d096b9e197dc6c318ec75f75e2a26dcc371c9b5925aa8487e997074841b not found: ID does not exist" containerID="3c0c2d096b9e197dc6c318ec75f75e2a26dcc371c9b5925aa8487e997074841b" Dec 11 22:01:58 crc kubenswrapper[4956]: I1211 22:01:58.495357 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c0c2d096b9e197dc6c318ec75f75e2a26dcc371c9b5925aa8487e997074841b"} err="failed to get container status \"3c0c2d096b9e197dc6c318ec75f75e2a26dcc371c9b5925aa8487e997074841b\": rpc error: code = NotFound desc = could not find container \"3c0c2d096b9e197dc6c318ec75f75e2a26dcc371c9b5925aa8487e997074841b\": container with ID starting with 3c0c2d096b9e197dc6c318ec75f75e2a26dcc371c9b5925aa8487e997074841b not found: ID does not exist" Dec 11 22:01:58 crc kubenswrapper[4956]: I1211 22:01:58.498427 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-2gr5s" podStartSLOduration=1.6968364839999999 podStartE2EDuration="2.498396976s" podCreationTimestamp="2025-12-11 22:01:56 +0000 UTC" firstStartedPulling="2025-12-11 22:01:57.266014926 +0000 UTC m=+809.710393076" lastFinishedPulling="2025-12-11 22:01:58.067575418 +0000 UTC m=+810.511953568" observedRunningTime="2025-12-11 22:01:58.497745848 +0000 UTC m=+810.942124008" watchObservedRunningTime="2025-12-11 22:01:58.498396976 +0000 UTC m=+810.942775136" Dec 11 22:01:58 crc kubenswrapper[4956]: I1211 22:01:58.513105 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-6r6hf"] Dec 11 22:01:58 crc kubenswrapper[4956]: I1211 22:01:58.517949 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-index-6r6hf"] Dec 11 22:02:00 crc kubenswrapper[4956]: I1211 22:02:00.029587 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0f6ee1d-38b7-461c-9283-8246e7f17676" path="/var/lib/kubelet/pods/d0f6ee1d-38b7-461c-9283-8246e7f17676/volumes" Dec 11 22:02:03 crc kubenswrapper[4956]: I1211 22:02:03.329064 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-98gk5" Dec 11 22:02:07 crc kubenswrapper[4956]: I1211 22:02:07.046693 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-index-2gr5s" Dec 11 22:02:07 crc kubenswrapper[4956]: I1211 22:02:07.047119 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/mariadb-operator-index-2gr5s" Dec 11 22:02:07 crc kubenswrapper[4956]: I1211 22:02:07.081578 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openstack-operators/mariadb-operator-index-2gr5s" Dec 11 22:02:07 crc kubenswrapper[4956]: I1211 22:02:07.594990 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-index-2gr5s" Dec 11 22:02:09 crc kubenswrapper[4956]: I1211 22:02:09.186858 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5"] Dec 11 22:02:09 crc kubenswrapper[4956]: E1211 22:02:09.187928 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0f6ee1d-38b7-461c-9283-8246e7f17676" containerName="registry-server" Dec 11 22:02:09 crc kubenswrapper[4956]: I1211 22:02:09.187960 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0f6ee1d-38b7-461c-9283-8246e7f17676" containerName="registry-server" Dec 11 22:02:09 crc kubenswrapper[4956]: I1211 22:02:09.188193 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0f6ee1d-38b7-461c-9283-8246e7f17676" containerName="registry-server" Dec 11 22:02:09 crc kubenswrapper[4956]: I1211 22:02:09.189646 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5" Dec 11 22:02:09 crc kubenswrapper[4956]: I1211 22:02:09.201336 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5"] Dec 11 22:02:09 crc kubenswrapper[4956]: I1211 22:02:09.206677 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-7p6h2" Dec 11 22:02:09 crc kubenswrapper[4956]: I1211 22:02:09.226173 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4d696972-48a5-4dd3-8a23-a320ea760628-util\") pod \"c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5\" (UID: \"4d696972-48a5-4dd3-8a23-a320ea760628\") " pod="openstack-operators/c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5" Dec 11 22:02:09 crc kubenswrapper[4956]: I1211 22:02:09.226463 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkgxj\" (UniqueName: \"kubernetes.io/projected/4d696972-48a5-4dd3-8a23-a320ea760628-kube-api-access-mkgxj\") pod \"c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5\" (UID: \"4d696972-48a5-4dd3-8a23-a320ea760628\") " pod="openstack-operators/c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5" Dec 11 22:02:09 crc kubenswrapper[4956]: I1211 22:02:09.226859 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4d696972-48a5-4dd3-8a23-a320ea760628-bundle\") pod \"c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5\" (UID: \"4d696972-48a5-4dd3-8a23-a320ea760628\") " pod="openstack-operators/c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5" Dec 11 22:02:09 crc kubenswrapper[4956]: I1211 22:02:09.328758 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4d696972-48a5-4dd3-8a23-a320ea760628-bundle\") pod \"c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5\" (UID: \"4d696972-48a5-4dd3-8a23-a320ea760628\") " 
pod="openstack-operators/c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5" Dec 11 22:02:09 crc kubenswrapper[4956]: I1211 22:02:09.328932 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4d696972-48a5-4dd3-8a23-a320ea760628-util\") pod \"c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5\" (UID: \"4d696972-48a5-4dd3-8a23-a320ea760628\") " pod="openstack-operators/c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5" Dec 11 22:02:09 crc kubenswrapper[4956]: I1211 22:02:09.328972 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkgxj\" (UniqueName: \"kubernetes.io/projected/4d696972-48a5-4dd3-8a23-a320ea760628-kube-api-access-mkgxj\") pod \"c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5\" (UID: \"4d696972-48a5-4dd3-8a23-a320ea760628\") " pod="openstack-operators/c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5" Dec 11 22:02:09 crc kubenswrapper[4956]: I1211 22:02:09.329342 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4d696972-48a5-4dd3-8a23-a320ea760628-bundle\") pod \"c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5\" (UID: \"4d696972-48a5-4dd3-8a23-a320ea760628\") " pod="openstack-operators/c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5" Dec 11 22:02:09 crc kubenswrapper[4956]: I1211 22:02:09.329345 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4d696972-48a5-4dd3-8a23-a320ea760628-util\") pod \"c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5\" (UID: \"4d696972-48a5-4dd3-8a23-a320ea760628\") " pod="openstack-operators/c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5" Dec 11 22:02:09 crc kubenswrapper[4956]: I1211 22:02:09.350313 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkgxj\" (UniqueName: \"kubernetes.io/projected/4d696972-48a5-4dd3-8a23-a320ea760628-kube-api-access-mkgxj\") pod \"c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5\" (UID: \"4d696972-48a5-4dd3-8a23-a320ea760628\") " pod="openstack-operators/c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5" Dec 11 22:02:09 crc kubenswrapper[4956]: I1211 22:02:09.512472 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5" Dec 11 22:02:09 crc kubenswrapper[4956]: I1211 22:02:09.939726 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5"] Dec 11 22:02:09 crc kubenswrapper[4956]: W1211 22:02:09.942788 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4d696972_48a5_4dd3_8a23_a320ea760628.slice/crio-c61ba99ecbe1e2e813ef48cdc993111677b2b2122c3149213e1ffa650d004700 WatchSource:0}: Error finding container c61ba99ecbe1e2e813ef48cdc993111677b2b2122c3149213e1ffa650d004700: Status 404 returned error can't find the container with id c61ba99ecbe1e2e813ef48cdc993111677b2b2122c3149213e1ffa650d004700 Dec 11 22:02:10 crc kubenswrapper[4956]: I1211 22:02:10.560445 4956 generic.go:334] "Generic (PLEG): container finished" podID="4d696972-48a5-4dd3-8a23-a320ea760628" containerID="30958c39fe199f1de669ec32b040dee4fcb2ef4048f26130e6af5dd503824693" exitCode=0 Dec 11 22:02:10 crc kubenswrapper[4956]: I1211 22:02:10.560507 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5" event={"ID":"4d696972-48a5-4dd3-8a23-a320ea760628","Type":"ContainerDied","Data":"30958c39fe199f1de669ec32b040dee4fcb2ef4048f26130e6af5dd503824693"} Dec 11 22:02:10 crc kubenswrapper[4956]: I1211 22:02:10.560788 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5" event={"ID":"4d696972-48a5-4dd3-8a23-a320ea760628","Type":"ContainerStarted","Data":"c61ba99ecbe1e2e813ef48cdc993111677b2b2122c3149213e1ffa650d004700"} Dec 11 22:02:12 crc kubenswrapper[4956]: I1211 22:02:12.581355 4956 generic.go:334] "Generic (PLEG): container finished" podID="4d696972-48a5-4dd3-8a23-a320ea760628" containerID="89e2cd2fc0253d0f66d05fc52e584317c52ec9287e64455192f018e08c192557" exitCode=0 Dec 11 22:02:12 crc kubenswrapper[4956]: I1211 22:02:12.581422 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5" event={"ID":"4d696972-48a5-4dd3-8a23-a320ea760628","Type":"ContainerDied","Data":"89e2cd2fc0253d0f66d05fc52e584317c52ec9287e64455192f018e08c192557"} Dec 11 22:02:13 crc kubenswrapper[4956]: I1211 22:02:13.588712 4956 generic.go:334] "Generic (PLEG): container finished" podID="4d696972-48a5-4dd3-8a23-a320ea760628" containerID="14ed8fad8564b40d99769705e8d84d0908b8ba4e425d92f7dfb7b549df2358aa" exitCode=0 Dec 11 22:02:13 crc kubenswrapper[4956]: I1211 22:02:13.588828 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5" event={"ID":"4d696972-48a5-4dd3-8a23-a320ea760628","Type":"ContainerDied","Data":"14ed8fad8564b40d99769705e8d84d0908b8ba4e425d92f7dfb7b549df2358aa"} Dec 11 22:02:14 crc kubenswrapper[4956]: I1211 22:02:14.842976 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5" Dec 11 22:02:14 crc kubenswrapper[4956]: I1211 22:02:14.908340 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4d696972-48a5-4dd3-8a23-a320ea760628-util\") pod \"4d696972-48a5-4dd3-8a23-a320ea760628\" (UID: \"4d696972-48a5-4dd3-8a23-a320ea760628\") " Dec 11 22:02:14 crc kubenswrapper[4956]: I1211 22:02:14.908403 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4d696972-48a5-4dd3-8a23-a320ea760628-bundle\") pod \"4d696972-48a5-4dd3-8a23-a320ea760628\" (UID: \"4d696972-48a5-4dd3-8a23-a320ea760628\") " Dec 11 22:02:14 crc kubenswrapper[4956]: I1211 22:02:14.908494 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mkgxj\" (UniqueName: \"kubernetes.io/projected/4d696972-48a5-4dd3-8a23-a320ea760628-kube-api-access-mkgxj\") pod \"4d696972-48a5-4dd3-8a23-a320ea760628\" (UID: \"4d696972-48a5-4dd3-8a23-a320ea760628\") " Dec 11 22:02:14 crc kubenswrapper[4956]: I1211 22:02:14.909565 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d696972-48a5-4dd3-8a23-a320ea760628-bundle" (OuterVolumeSpecName: "bundle") pod "4d696972-48a5-4dd3-8a23-a320ea760628" (UID: "4d696972-48a5-4dd3-8a23-a320ea760628"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:02:14 crc kubenswrapper[4956]: I1211 22:02:14.915994 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d696972-48a5-4dd3-8a23-a320ea760628-kube-api-access-mkgxj" (OuterVolumeSpecName: "kube-api-access-mkgxj") pod "4d696972-48a5-4dd3-8a23-a320ea760628" (UID: "4d696972-48a5-4dd3-8a23-a320ea760628"). InnerVolumeSpecName "kube-api-access-mkgxj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:02:14 crc kubenswrapper[4956]: I1211 22:02:14.921890 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d696972-48a5-4dd3-8a23-a320ea760628-util" (OuterVolumeSpecName: "util") pod "4d696972-48a5-4dd3-8a23-a320ea760628" (UID: "4d696972-48a5-4dd3-8a23-a320ea760628"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:02:15 crc kubenswrapper[4956]: I1211 22:02:15.010498 4956 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4d696972-48a5-4dd3-8a23-a320ea760628-util\") on node \"crc\" DevicePath \"\"" Dec 11 22:02:15 crc kubenswrapper[4956]: I1211 22:02:15.010537 4956 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4d696972-48a5-4dd3-8a23-a320ea760628-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 22:02:15 crc kubenswrapper[4956]: I1211 22:02:15.010550 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mkgxj\" (UniqueName: \"kubernetes.io/projected/4d696972-48a5-4dd3-8a23-a320ea760628-kube-api-access-mkgxj\") on node \"crc\" DevicePath \"\"" Dec 11 22:02:15 crc kubenswrapper[4956]: I1211 22:02:15.603563 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5" event={"ID":"4d696972-48a5-4dd3-8a23-a320ea760628","Type":"ContainerDied","Data":"c61ba99ecbe1e2e813ef48cdc993111677b2b2122c3149213e1ffa650d004700"} Dec 11 22:02:15 crc kubenswrapper[4956]: I1211 22:02:15.603604 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c61ba99ecbe1e2e813ef48cdc993111677b2b2122c3149213e1ffa650d004700" Dec 11 22:02:15 crc kubenswrapper[4956]: I1211 22:02:15.603644 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5" Dec 11 22:02:16 crc kubenswrapper[4956]: I1211 22:02:16.887939 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 22:02:16 crc kubenswrapper[4956]: I1211 22:02:16.888002 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 22:02:22 crc kubenswrapper[4956]: I1211 22:02:22.161381 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-556fbb47f6-t77m6"] Dec 11 22:02:22 crc kubenswrapper[4956]: E1211 22:02:22.161990 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d696972-48a5-4dd3-8a23-a320ea760628" containerName="pull" Dec 11 22:02:22 crc kubenswrapper[4956]: I1211 22:02:22.162006 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d696972-48a5-4dd3-8a23-a320ea760628" containerName="pull" Dec 11 22:02:22 crc kubenswrapper[4956]: E1211 22:02:22.162034 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d696972-48a5-4dd3-8a23-a320ea760628" containerName="util" Dec 11 22:02:22 crc kubenswrapper[4956]: I1211 22:02:22.162042 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d696972-48a5-4dd3-8a23-a320ea760628" containerName="util" Dec 11 22:02:22 crc kubenswrapper[4956]: E1211 22:02:22.162054 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d696972-48a5-4dd3-8a23-a320ea760628" containerName="extract" Dec 11 22:02:22 
crc kubenswrapper[4956]: I1211 22:02:22.162062 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d696972-48a5-4dd3-8a23-a320ea760628" containerName="extract" Dec 11 22:02:22 crc kubenswrapper[4956]: I1211 22:02:22.162189 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d696972-48a5-4dd3-8a23-a320ea760628" containerName="extract" Dec 11 22:02:22 crc kubenswrapper[4956]: I1211 22:02:22.162651 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-556fbb47f6-t77m6" Dec 11 22:02:22 crc kubenswrapper[4956]: I1211 22:02:22.164280 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-4k9gw" Dec 11 22:02:22 crc kubenswrapper[4956]: I1211 22:02:22.164451 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 11 22:02:22 crc kubenswrapper[4956]: I1211 22:02:22.164521 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-service-cert" Dec 11 22:02:22 crc kubenswrapper[4956]: I1211 22:02:22.177661 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-556fbb47f6-t77m6"] Dec 11 22:02:22 crc kubenswrapper[4956]: I1211 22:02:22.181229 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e6567d3b-2f90-413a-b7fc-edd972521754-apiservice-cert\") pod \"mariadb-operator-controller-manager-556fbb47f6-t77m6\" (UID: \"e6567d3b-2f90-413a-b7fc-edd972521754\") " pod="openstack-operators/mariadb-operator-controller-manager-556fbb47f6-t77m6" Dec 11 22:02:22 crc kubenswrapper[4956]: I1211 22:02:22.181303 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwrjs\" (UniqueName: \"kubernetes.io/projected/e6567d3b-2f90-413a-b7fc-edd972521754-kube-api-access-jwrjs\") pod \"mariadb-operator-controller-manager-556fbb47f6-t77m6\" (UID: \"e6567d3b-2f90-413a-b7fc-edd972521754\") " pod="openstack-operators/mariadb-operator-controller-manager-556fbb47f6-t77m6" Dec 11 22:02:22 crc kubenswrapper[4956]: I1211 22:02:22.181384 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e6567d3b-2f90-413a-b7fc-edd972521754-webhook-cert\") pod \"mariadb-operator-controller-manager-556fbb47f6-t77m6\" (UID: \"e6567d3b-2f90-413a-b7fc-edd972521754\") " pod="openstack-operators/mariadb-operator-controller-manager-556fbb47f6-t77m6" Dec 11 22:02:22 crc kubenswrapper[4956]: I1211 22:02:22.282798 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e6567d3b-2f90-413a-b7fc-edd972521754-webhook-cert\") pod \"mariadb-operator-controller-manager-556fbb47f6-t77m6\" (UID: \"e6567d3b-2f90-413a-b7fc-edd972521754\") " pod="openstack-operators/mariadb-operator-controller-manager-556fbb47f6-t77m6" Dec 11 22:02:22 crc kubenswrapper[4956]: I1211 22:02:22.282854 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e6567d3b-2f90-413a-b7fc-edd972521754-apiservice-cert\") pod \"mariadb-operator-controller-manager-556fbb47f6-t77m6\" (UID: 
\"e6567d3b-2f90-413a-b7fc-edd972521754\") " pod="openstack-operators/mariadb-operator-controller-manager-556fbb47f6-t77m6" Dec 11 22:02:22 crc kubenswrapper[4956]: I1211 22:02:22.282899 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwrjs\" (UniqueName: \"kubernetes.io/projected/e6567d3b-2f90-413a-b7fc-edd972521754-kube-api-access-jwrjs\") pod \"mariadb-operator-controller-manager-556fbb47f6-t77m6\" (UID: \"e6567d3b-2f90-413a-b7fc-edd972521754\") " pod="openstack-operators/mariadb-operator-controller-manager-556fbb47f6-t77m6" Dec 11 22:02:22 crc kubenswrapper[4956]: I1211 22:02:22.294541 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e6567d3b-2f90-413a-b7fc-edd972521754-apiservice-cert\") pod \"mariadb-operator-controller-manager-556fbb47f6-t77m6\" (UID: \"e6567d3b-2f90-413a-b7fc-edd972521754\") " pod="openstack-operators/mariadb-operator-controller-manager-556fbb47f6-t77m6" Dec 11 22:02:22 crc kubenswrapper[4956]: I1211 22:02:22.295719 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e6567d3b-2f90-413a-b7fc-edd972521754-webhook-cert\") pod \"mariadb-operator-controller-manager-556fbb47f6-t77m6\" (UID: \"e6567d3b-2f90-413a-b7fc-edd972521754\") " pod="openstack-operators/mariadb-operator-controller-manager-556fbb47f6-t77m6" Dec 11 22:02:22 crc kubenswrapper[4956]: I1211 22:02:22.313917 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwrjs\" (UniqueName: \"kubernetes.io/projected/e6567d3b-2f90-413a-b7fc-edd972521754-kube-api-access-jwrjs\") pod \"mariadb-operator-controller-manager-556fbb47f6-t77m6\" (UID: \"e6567d3b-2f90-413a-b7fc-edd972521754\") " pod="openstack-operators/mariadb-operator-controller-manager-556fbb47f6-t77m6" Dec 11 22:02:22 crc kubenswrapper[4956]: I1211 22:02:22.483424 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-556fbb47f6-t77m6" Dec 11 22:02:22 crc kubenswrapper[4956]: I1211 22:02:22.677357 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-556fbb47f6-t77m6"] Dec 11 22:02:23 crc kubenswrapper[4956]: I1211 22:02:23.657857 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-556fbb47f6-t77m6" event={"ID":"e6567d3b-2f90-413a-b7fc-edd972521754","Type":"ContainerStarted","Data":"ab5018cfca0de116ab822c87fce9acdb510f13f0795d4f8b67a3dcb0491e2580"} Dec 11 22:02:26 crc kubenswrapper[4956]: I1211 22:02:26.448519 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vfpk2"] Dec 11 22:02:26 crc kubenswrapper[4956]: I1211 22:02:26.450290 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vfpk2" Dec 11 22:02:26 crc kubenswrapper[4956]: I1211 22:02:26.471553 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vfpk2"] Dec 11 22:02:26 crc kubenswrapper[4956]: I1211 22:02:26.550424 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a198fc83-e479-47ca-9f2d-b6627aa6db2b-catalog-content\") pod \"community-operators-vfpk2\" (UID: \"a198fc83-e479-47ca-9f2d-b6627aa6db2b\") " pod="openshift-marketplace/community-operators-vfpk2" Dec 11 22:02:26 crc kubenswrapper[4956]: I1211 22:02:26.550488 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a198fc83-e479-47ca-9f2d-b6627aa6db2b-utilities\") pod \"community-operators-vfpk2\" (UID: \"a198fc83-e479-47ca-9f2d-b6627aa6db2b\") " pod="openshift-marketplace/community-operators-vfpk2" Dec 11 22:02:26 crc kubenswrapper[4956]: I1211 22:02:26.550512 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66cfw\" (UniqueName: \"kubernetes.io/projected/a198fc83-e479-47ca-9f2d-b6627aa6db2b-kube-api-access-66cfw\") pod \"community-operators-vfpk2\" (UID: \"a198fc83-e479-47ca-9f2d-b6627aa6db2b\") " pod="openshift-marketplace/community-operators-vfpk2" Dec 11 22:02:26 crc kubenswrapper[4956]: I1211 22:02:26.651687 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a198fc83-e479-47ca-9f2d-b6627aa6db2b-catalog-content\") pod \"community-operators-vfpk2\" (UID: \"a198fc83-e479-47ca-9f2d-b6627aa6db2b\") " pod="openshift-marketplace/community-operators-vfpk2" Dec 11 22:02:26 crc kubenswrapper[4956]: I1211 22:02:26.651759 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a198fc83-e479-47ca-9f2d-b6627aa6db2b-utilities\") pod \"community-operators-vfpk2\" (UID: \"a198fc83-e479-47ca-9f2d-b6627aa6db2b\") " pod="openshift-marketplace/community-operators-vfpk2" Dec 11 22:02:26 crc kubenswrapper[4956]: I1211 22:02:26.651812 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66cfw\" (UniqueName: \"kubernetes.io/projected/a198fc83-e479-47ca-9f2d-b6627aa6db2b-kube-api-access-66cfw\") pod \"community-operators-vfpk2\" (UID: \"a198fc83-e479-47ca-9f2d-b6627aa6db2b\") " pod="openshift-marketplace/community-operators-vfpk2" Dec 11 22:02:26 crc kubenswrapper[4956]: I1211 22:02:26.652655 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a198fc83-e479-47ca-9f2d-b6627aa6db2b-catalog-content\") pod \"community-operators-vfpk2\" (UID: \"a198fc83-e479-47ca-9f2d-b6627aa6db2b\") " pod="openshift-marketplace/community-operators-vfpk2" Dec 11 22:02:26 crc kubenswrapper[4956]: I1211 22:02:26.652919 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a198fc83-e479-47ca-9f2d-b6627aa6db2b-utilities\") pod \"community-operators-vfpk2\" (UID: \"a198fc83-e479-47ca-9f2d-b6627aa6db2b\") " pod="openshift-marketplace/community-operators-vfpk2" Dec 11 22:02:26 crc kubenswrapper[4956]: I1211 22:02:26.688835 4956 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-66cfw\" (UniqueName: \"kubernetes.io/projected/a198fc83-e479-47ca-9f2d-b6627aa6db2b-kube-api-access-66cfw\") pod \"community-operators-vfpk2\" (UID: \"a198fc83-e479-47ca-9f2d-b6627aa6db2b\") " pod="openshift-marketplace/community-operators-vfpk2" Dec 11 22:02:26 crc kubenswrapper[4956]: I1211 22:02:26.767718 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vfpk2" Dec 11 22:02:27 crc kubenswrapper[4956]: I1211 22:02:27.402191 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vfpk2"] Dec 11 22:02:27 crc kubenswrapper[4956]: W1211 22:02:27.404104 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda198fc83_e479_47ca_9f2d_b6627aa6db2b.slice/crio-b36fa14a0f7055e4c3836bccc057f73eff5940d0b74c1d5cd46e6d763d1a7500 WatchSource:0}: Error finding container b36fa14a0f7055e4c3836bccc057f73eff5940d0b74c1d5cd46e6d763d1a7500: Status 404 returned error can't find the container with id b36fa14a0f7055e4c3836bccc057f73eff5940d0b74c1d5cd46e6d763d1a7500 Dec 11 22:02:27 crc kubenswrapper[4956]: I1211 22:02:27.689647 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-556fbb47f6-t77m6" event={"ID":"e6567d3b-2f90-413a-b7fc-edd972521754","Type":"ContainerStarted","Data":"8336e1d4a0fe11cbc9bbd7a852e71fec6b8fb1e44c668160e7ade7aad9aed8eb"} Dec 11 22:02:27 crc kubenswrapper[4956]: I1211 22:02:27.690005 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-556fbb47f6-t77m6" Dec 11 22:02:27 crc kubenswrapper[4956]: I1211 22:02:27.691209 4956 generic.go:334] "Generic (PLEG): container finished" podID="a198fc83-e479-47ca-9f2d-b6627aa6db2b" containerID="03e203d6c4e567c62ca587e48cbf9d1b9e2beba564bdec74018f60b8786a6988" exitCode=0 Dec 11 22:02:27 crc kubenswrapper[4956]: I1211 22:02:27.691240 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vfpk2" event={"ID":"a198fc83-e479-47ca-9f2d-b6627aa6db2b","Type":"ContainerDied","Data":"03e203d6c4e567c62ca587e48cbf9d1b9e2beba564bdec74018f60b8786a6988"} Dec 11 22:02:27 crc kubenswrapper[4956]: I1211 22:02:27.691260 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vfpk2" event={"ID":"a198fc83-e479-47ca-9f2d-b6627aa6db2b","Type":"ContainerStarted","Data":"b36fa14a0f7055e4c3836bccc057f73eff5940d0b74c1d5cd46e6d763d1a7500"} Dec 11 22:02:27 crc kubenswrapper[4956]: I1211 22:02:27.710148 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-556fbb47f6-t77m6" podStartSLOduration=1.453861828 podStartE2EDuration="5.71013199s" podCreationTimestamp="2025-12-11 22:02:22 +0000 UTC" firstStartedPulling="2025-12-11 22:02:22.681877045 +0000 UTC m=+835.126255195" lastFinishedPulling="2025-12-11 22:02:26.938147207 +0000 UTC m=+839.382525357" observedRunningTime="2025-12-11 22:02:27.705880585 +0000 UTC m=+840.150258865" watchObservedRunningTime="2025-12-11 22:02:27.71013199 +0000 UTC m=+840.154510140" Dec 11 22:02:28 crc kubenswrapper[4956]: I1211 22:02:28.698980 4956 generic.go:334] "Generic (PLEG): container finished" podID="a198fc83-e479-47ca-9f2d-b6627aa6db2b" 
containerID="b8193cc19eaa290e97486e8edd0ed2e5b7f7c21bc8f785bf6a8c92a8ead6f938" exitCode=0 Dec 11 22:02:28 crc kubenswrapper[4956]: I1211 22:02:28.699069 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vfpk2" event={"ID":"a198fc83-e479-47ca-9f2d-b6627aa6db2b","Type":"ContainerDied","Data":"b8193cc19eaa290e97486e8edd0ed2e5b7f7c21bc8f785bf6a8c92a8ead6f938"} Dec 11 22:02:29 crc kubenswrapper[4956]: I1211 22:02:29.706905 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vfpk2" event={"ID":"a198fc83-e479-47ca-9f2d-b6627aa6db2b","Type":"ContainerStarted","Data":"00a4d231c2b6d902ab733b57a3775e75eef1d46edaaf28cbd6318307819bc481"} Dec 11 22:02:29 crc kubenswrapper[4956]: I1211 22:02:29.726271 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vfpk2" podStartSLOduration=2.006785563 podStartE2EDuration="3.72625076s" podCreationTimestamp="2025-12-11 22:02:26 +0000 UTC" firstStartedPulling="2025-12-11 22:02:27.69238095 +0000 UTC m=+840.136759100" lastFinishedPulling="2025-12-11 22:02:29.411846137 +0000 UTC m=+841.856224297" observedRunningTime="2025-12-11 22:02:29.722936301 +0000 UTC m=+842.167314481" watchObservedRunningTime="2025-12-11 22:02:29.72625076 +0000 UTC m=+842.170628910" Dec 11 22:02:32 crc kubenswrapper[4956]: I1211 22:02:32.488853 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-556fbb47f6-t77m6" Dec 11 22:02:36 crc kubenswrapper[4956]: I1211 22:02:36.767858 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vfpk2" Dec 11 22:02:36 crc kubenswrapper[4956]: I1211 22:02:36.768214 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vfpk2" Dec 11 22:02:36 crc kubenswrapper[4956]: I1211 22:02:36.808275 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vfpk2" Dec 11 22:02:37 crc kubenswrapper[4956]: I1211 22:02:37.244036 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-index-2pv8c"] Dec 11 22:02:37 crc kubenswrapper[4956]: I1211 22:02:37.245862 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-2pv8c" Dec 11 22:02:37 crc kubenswrapper[4956]: I1211 22:02:37.248790 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-index-dockercfg-jpmhx" Dec 11 22:02:37 crc kubenswrapper[4956]: I1211 22:02:37.251241 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-2pv8c"] Dec 11 22:02:37 crc kubenswrapper[4956]: I1211 22:02:37.277065 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hznx9\" (UniqueName: \"kubernetes.io/projected/fddb0340-f5e5-46e1-8193-236b99c5e859-kube-api-access-hznx9\") pod \"infra-operator-index-2pv8c\" (UID: \"fddb0340-f5e5-46e1-8193-236b99c5e859\") " pod="openstack-operators/infra-operator-index-2pv8c" Dec 11 22:02:37 crc kubenswrapper[4956]: I1211 22:02:37.378512 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hznx9\" (UniqueName: \"kubernetes.io/projected/fddb0340-f5e5-46e1-8193-236b99c5e859-kube-api-access-hznx9\") pod \"infra-operator-index-2pv8c\" (UID: \"fddb0340-f5e5-46e1-8193-236b99c5e859\") " pod="openstack-operators/infra-operator-index-2pv8c" Dec 11 22:02:37 crc kubenswrapper[4956]: I1211 22:02:37.401416 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hznx9\" (UniqueName: \"kubernetes.io/projected/fddb0340-f5e5-46e1-8193-236b99c5e859-kube-api-access-hznx9\") pod \"infra-operator-index-2pv8c\" (UID: \"fddb0340-f5e5-46e1-8193-236b99c5e859\") " pod="openstack-operators/infra-operator-index-2pv8c" Dec 11 22:02:37 crc kubenswrapper[4956]: I1211 22:02:37.577128 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-2pv8c" Dec 11 22:02:37 crc kubenswrapper[4956]: I1211 22:02:37.806858 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vfpk2" Dec 11 22:02:37 crc kubenswrapper[4956]: I1211 22:02:37.867489 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-2pv8c"] Dec 11 22:02:38 crc kubenswrapper[4956]: I1211 22:02:38.774013 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-2pv8c" event={"ID":"fddb0340-f5e5-46e1-8193-236b99c5e859","Type":"ContainerStarted","Data":"d7a3c0a2ee144ce96e5b67db3d54aa990d0234d889ae9bd488dbfb73e8e68692"} Dec 11 22:02:40 crc kubenswrapper[4956]: I1211 22:02:40.787678 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-2pv8c" event={"ID":"fddb0340-f5e5-46e1-8193-236b99c5e859","Type":"ContainerStarted","Data":"18f98c685cd575edb5b87245a363cf36b62d6b62a7f32db8d10d6779c79abe7e"} Dec 11 22:02:41 crc kubenswrapper[4956]: I1211 22:02:41.033763 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vfpk2"] Dec 11 22:02:41 crc kubenswrapper[4956]: I1211 22:02:41.034478 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vfpk2" podUID="a198fc83-e479-47ca-9f2d-b6627aa6db2b" containerName="registry-server" containerID="cri-o://00a4d231c2b6d902ab733b57a3775e75eef1d46edaaf28cbd6318307819bc481" gracePeriod=2 Dec 11 22:02:41 crc kubenswrapper[4956]: I1211 22:02:41.795432 4956 generic.go:334] "Generic (PLEG): container finished" podID="a198fc83-e479-47ca-9f2d-b6627aa6db2b" containerID="00a4d231c2b6d902ab733b57a3775e75eef1d46edaaf28cbd6318307819bc481" exitCode=0 Dec 11 22:02:41 crc kubenswrapper[4956]: I1211 22:02:41.796215 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vfpk2" event={"ID":"a198fc83-e479-47ca-9f2d-b6627aa6db2b","Type":"ContainerDied","Data":"00a4d231c2b6d902ab733b57a3775e75eef1d46edaaf28cbd6318307819bc481"} Dec 11 22:02:41 crc kubenswrapper[4956]: I1211 22:02:41.821377 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-index-2pv8c" podStartSLOduration=3.490582269 podStartE2EDuration="4.821352336s" podCreationTimestamp="2025-12-11 22:02:37 +0000 UTC" firstStartedPulling="2025-12-11 22:02:37.873672041 +0000 UTC m=+850.318050191" lastFinishedPulling="2025-12-11 22:02:39.204442088 +0000 UTC m=+851.648820258" observedRunningTime="2025-12-11 22:02:41.811143371 +0000 UTC m=+854.255521541" watchObservedRunningTime="2025-12-11 22:02:41.821352336 +0000 UTC m=+854.265730506" Dec 11 22:02:41 crc kubenswrapper[4956]: I1211 22:02:41.897270 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vfpk2" Dec 11 22:02:42 crc kubenswrapper[4956]: I1211 22:02:42.032100 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a198fc83-e479-47ca-9f2d-b6627aa6db2b-catalog-content\") pod \"a198fc83-e479-47ca-9f2d-b6627aa6db2b\" (UID: \"a198fc83-e479-47ca-9f2d-b6627aa6db2b\") " Dec 11 22:02:42 crc kubenswrapper[4956]: I1211 22:02:42.032201 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a198fc83-e479-47ca-9f2d-b6627aa6db2b-utilities\") pod \"a198fc83-e479-47ca-9f2d-b6627aa6db2b\" (UID: \"a198fc83-e479-47ca-9f2d-b6627aa6db2b\") " Dec 11 22:02:42 crc kubenswrapper[4956]: I1211 22:02:42.032256 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-66cfw\" (UniqueName: \"kubernetes.io/projected/a198fc83-e479-47ca-9f2d-b6627aa6db2b-kube-api-access-66cfw\") pod \"a198fc83-e479-47ca-9f2d-b6627aa6db2b\" (UID: \"a198fc83-e479-47ca-9f2d-b6627aa6db2b\") " Dec 11 22:02:42 crc kubenswrapper[4956]: I1211 22:02:42.034070 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a198fc83-e479-47ca-9f2d-b6627aa6db2b-utilities" (OuterVolumeSpecName: "utilities") pod "a198fc83-e479-47ca-9f2d-b6627aa6db2b" (UID: "a198fc83-e479-47ca-9f2d-b6627aa6db2b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:02:42 crc kubenswrapper[4956]: I1211 22:02:42.038901 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a198fc83-e479-47ca-9f2d-b6627aa6db2b-kube-api-access-66cfw" (OuterVolumeSpecName: "kube-api-access-66cfw") pod "a198fc83-e479-47ca-9f2d-b6627aa6db2b" (UID: "a198fc83-e479-47ca-9f2d-b6627aa6db2b"). InnerVolumeSpecName "kube-api-access-66cfw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:02:42 crc kubenswrapper[4956]: I1211 22:02:42.079522 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a198fc83-e479-47ca-9f2d-b6627aa6db2b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a198fc83-e479-47ca-9f2d-b6627aa6db2b" (UID: "a198fc83-e479-47ca-9f2d-b6627aa6db2b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:02:42 crc kubenswrapper[4956]: I1211 22:02:42.133960 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a198fc83-e479-47ca-9f2d-b6627aa6db2b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 22:02:42 crc kubenswrapper[4956]: I1211 22:02:42.134008 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a198fc83-e479-47ca-9f2d-b6627aa6db2b-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 22:02:42 crc kubenswrapper[4956]: I1211 22:02:42.134025 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-66cfw\" (UniqueName: \"kubernetes.io/projected/a198fc83-e479-47ca-9f2d-b6627aa6db2b-kube-api-access-66cfw\") on node \"crc\" DevicePath \"\"" Dec 11 22:02:42 crc kubenswrapper[4956]: I1211 22:02:42.803280 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vfpk2" event={"ID":"a198fc83-e479-47ca-9f2d-b6627aa6db2b","Type":"ContainerDied","Data":"b36fa14a0f7055e4c3836bccc057f73eff5940d0b74c1d5cd46e6d763d1a7500"} Dec 11 22:02:42 crc kubenswrapper[4956]: I1211 22:02:42.803381 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vfpk2" Dec 11 22:02:42 crc kubenswrapper[4956]: I1211 22:02:42.803647 4956 scope.go:117] "RemoveContainer" containerID="00a4d231c2b6d902ab733b57a3775e75eef1d46edaaf28cbd6318307819bc481" Dec 11 22:02:42 crc kubenswrapper[4956]: I1211 22:02:42.830229 4956 scope.go:117] "RemoveContainer" containerID="b8193cc19eaa290e97486e8edd0ed2e5b7f7c21bc8f785bf6a8c92a8ead6f938" Dec 11 22:02:42 crc kubenswrapper[4956]: I1211 22:02:42.848618 4956 scope.go:117] "RemoveContainer" containerID="03e203d6c4e567c62ca587e48cbf9d1b9e2beba564bdec74018f60b8786a6988" Dec 11 22:02:42 crc kubenswrapper[4956]: I1211 22:02:42.857683 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vfpk2"] Dec 11 22:02:42 crc kubenswrapper[4956]: I1211 22:02:42.861785 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vfpk2"] Dec 11 22:02:44 crc kubenswrapper[4956]: I1211 22:02:44.030398 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a198fc83-e479-47ca-9f2d-b6627aa6db2b" path="/var/lib/kubelet/pods/a198fc83-e479-47ca-9f2d-b6627aa6db2b/volumes" Dec 11 22:02:46 crc kubenswrapper[4956]: I1211 22:02:46.888692 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 22:02:46 crc kubenswrapper[4956]: I1211 22:02:46.889162 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 22:02:46 crc kubenswrapper[4956]: I1211 22:02:46.889231 4956 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" Dec 11 22:02:46 crc kubenswrapper[4956]: I1211 22:02:46.890070 4956 
Dec 11 22:02:46 crc kubenswrapper[4956]: I1211 22:02:46.890070 4956 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4499626ee92b2b1ce574f017b854a027fdb33d8effd0a947335164f75f9ce2f0"} pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 11 22:02:46 crc kubenswrapper[4956]: I1211 22:02:46.890184 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" containerID="cri-o://4499626ee92b2b1ce574f017b854a027fdb33d8effd0a947335164f75f9ce2f0" gracePeriod=600
Dec 11 22:02:47 crc kubenswrapper[4956]: I1211 22:02:47.578141 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-index-2pv8c"
Dec 11 22:02:47 crc kubenswrapper[4956]: I1211 22:02:47.578523 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/infra-operator-index-2pv8c"
Dec 11 22:02:47 crc kubenswrapper[4956]: I1211 22:02:47.612997 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/infra-operator-index-2pv8c"
Dec 11 22:02:47 crc kubenswrapper[4956]: I1211 22:02:47.844287 4956 generic.go:334] "Generic (PLEG): container finished" podID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerID="4499626ee92b2b1ce574f017b854a027fdb33d8effd0a947335164f75f9ce2f0" exitCode=0
Dec 11 22:02:47 crc kubenswrapper[4956]: I1211 22:02:47.844355 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" event={"ID":"cf61c63b-b06c-4f51-add2-aefe57de751a","Type":"ContainerDied","Data":"4499626ee92b2b1ce574f017b854a027fdb33d8effd0a947335164f75f9ce2f0"}
Dec 11 22:02:47 crc kubenswrapper[4956]: I1211 22:02:47.844386 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" event={"ID":"cf61c63b-b06c-4f51-add2-aefe57de751a","Type":"ContainerStarted","Data":"16eb3ed064bf8b2d4bc79eb3f1d7745450b60887fc5c6da806964966eb18a92c"}
Dec 11 22:02:47 crc kubenswrapper[4956]: I1211 22:02:47.844402 4956 scope.go:117] "RemoveContainer" containerID="2913b125d5d4273e7a22b870a21fcdd6061910016396e2d50d698aaca6bf5cbe"
Dec 11 22:02:47 crc kubenswrapper[4956]: I1211 22:02:47.875895 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-index-2pv8c"
Dec 11 22:02:49 crc kubenswrapper[4956]: I1211 22:02:49.301637 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7"]
Dec 11 22:02:49 crc kubenswrapper[4956]: E1211 22:02:49.302239 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a198fc83-e479-47ca-9f2d-b6627aa6db2b" containerName="extract-content"
Dec 11 22:02:49 crc kubenswrapper[4956]: I1211 22:02:49.302255 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="a198fc83-e479-47ca-9f2d-b6627aa6db2b" containerName="extract-content"
Dec 11 22:02:49 crc kubenswrapper[4956]: E1211 22:02:49.302274 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a198fc83-e479-47ca-9f2d-b6627aa6db2b" containerName="extract-utilities"
Dec 11 22:02:49 crc kubenswrapper[4956]: I1211 22:02:49.302282 4956 state_mem.go:107] "Deleted CPUSet assignment"
podUID="a198fc83-e479-47ca-9f2d-b6627aa6db2b" containerName="extract-utilities" Dec 11 22:02:49 crc kubenswrapper[4956]: E1211 22:02:49.302294 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a198fc83-e479-47ca-9f2d-b6627aa6db2b" containerName="registry-server" Dec 11 22:02:49 crc kubenswrapper[4956]: I1211 22:02:49.302305 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="a198fc83-e479-47ca-9f2d-b6627aa6db2b" containerName="registry-server" Dec 11 22:02:49 crc kubenswrapper[4956]: I1211 22:02:49.302442 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="a198fc83-e479-47ca-9f2d-b6627aa6db2b" containerName="registry-server" Dec 11 22:02:49 crc kubenswrapper[4956]: I1211 22:02:49.303467 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7" Dec 11 22:02:49 crc kubenswrapper[4956]: I1211 22:02:49.305818 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-7p6h2" Dec 11 22:02:49 crc kubenswrapper[4956]: I1211 22:02:49.318813 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7"] Dec 11 22:02:49 crc kubenswrapper[4956]: I1211 22:02:49.496692 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fr5qk\" (UniqueName: \"kubernetes.io/projected/7a89c45b-e195-4012-a532-aa4430a52d63-kube-api-access-fr5qk\") pod \"a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7\" (UID: \"7a89c45b-e195-4012-a532-aa4430a52d63\") " pod="openstack-operators/a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7" Dec 11 22:02:49 crc kubenswrapper[4956]: I1211 22:02:49.496811 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7a89c45b-e195-4012-a532-aa4430a52d63-util\") pod \"a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7\" (UID: \"7a89c45b-e195-4012-a532-aa4430a52d63\") " pod="openstack-operators/a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7" Dec 11 22:02:49 crc kubenswrapper[4956]: I1211 22:02:49.496858 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7a89c45b-e195-4012-a532-aa4430a52d63-bundle\") pod \"a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7\" (UID: \"7a89c45b-e195-4012-a532-aa4430a52d63\") " pod="openstack-operators/a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7" Dec 11 22:02:49 crc kubenswrapper[4956]: I1211 22:02:49.597935 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fr5qk\" (UniqueName: \"kubernetes.io/projected/7a89c45b-e195-4012-a532-aa4430a52d63-kube-api-access-fr5qk\") pod \"a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7\" (UID: \"7a89c45b-e195-4012-a532-aa4430a52d63\") " pod="openstack-operators/a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7" Dec 11 22:02:49 crc kubenswrapper[4956]: I1211 22:02:49.598081 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7a89c45b-e195-4012-a532-aa4430a52d63-util\") pod \"a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7\" (UID: 
\"7a89c45b-e195-4012-a532-aa4430a52d63\") " pod="openstack-operators/a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7" Dec 11 22:02:49 crc kubenswrapper[4956]: I1211 22:02:49.598155 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7a89c45b-e195-4012-a532-aa4430a52d63-bundle\") pod \"a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7\" (UID: \"7a89c45b-e195-4012-a532-aa4430a52d63\") " pod="openstack-operators/a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7" Dec 11 22:02:49 crc kubenswrapper[4956]: I1211 22:02:49.599037 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7a89c45b-e195-4012-a532-aa4430a52d63-bundle\") pod \"a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7\" (UID: \"7a89c45b-e195-4012-a532-aa4430a52d63\") " pod="openstack-operators/a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7" Dec 11 22:02:49 crc kubenswrapper[4956]: I1211 22:02:49.600046 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7a89c45b-e195-4012-a532-aa4430a52d63-util\") pod \"a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7\" (UID: \"7a89c45b-e195-4012-a532-aa4430a52d63\") " pod="openstack-operators/a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7" Dec 11 22:02:49 crc kubenswrapper[4956]: I1211 22:02:49.635605 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fr5qk\" (UniqueName: \"kubernetes.io/projected/7a89c45b-e195-4012-a532-aa4430a52d63-kube-api-access-fr5qk\") pod \"a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7\" (UID: \"7a89c45b-e195-4012-a532-aa4430a52d63\") " pod="openstack-operators/a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7" Dec 11 22:02:49 crc kubenswrapper[4956]: I1211 22:02:49.924037 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7" Dec 11 22:02:50 crc kubenswrapper[4956]: I1211 22:02:50.157116 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7"] Dec 11 22:02:50 crc kubenswrapper[4956]: I1211 22:02:50.868518 4956 generic.go:334] "Generic (PLEG): container finished" podID="7a89c45b-e195-4012-a532-aa4430a52d63" containerID="5665e682ce51a832d37ba467febb820588c7d4cd67526cd1fd371c64a1ff254e" exitCode=0 Dec 11 22:02:50 crc kubenswrapper[4956]: I1211 22:02:50.868642 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7" event={"ID":"7a89c45b-e195-4012-a532-aa4430a52d63","Type":"ContainerDied","Data":"5665e682ce51a832d37ba467febb820588c7d4cd67526cd1fd371c64a1ff254e"} Dec 11 22:02:50 crc kubenswrapper[4956]: I1211 22:02:50.868913 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7" event={"ID":"7a89c45b-e195-4012-a532-aa4430a52d63","Type":"ContainerStarted","Data":"65c2954fcccb8fc9602e3d55dd3f24088911362d05df844938ba87d7567c8521"} Dec 11 22:02:51 crc kubenswrapper[4956]: I1211 22:02:51.879643 4956 generic.go:334] "Generic (PLEG): container finished" podID="7a89c45b-e195-4012-a532-aa4430a52d63" containerID="5d2bc9a7cfffd438d4abdb4ecb6ff6aeaa38de028a09a6139e3e5792ab4b43f1" exitCode=0 Dec 11 22:02:51 crc kubenswrapper[4956]: I1211 22:02:51.880487 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7" event={"ID":"7a89c45b-e195-4012-a532-aa4430a52d63","Type":"ContainerDied","Data":"5d2bc9a7cfffd438d4abdb4ecb6ff6aeaa38de028a09a6139e3e5792ab4b43f1"} Dec 11 22:02:53 crc kubenswrapper[4956]: I1211 22:02:52.890045 4956 generic.go:334] "Generic (PLEG): container finished" podID="7a89c45b-e195-4012-a532-aa4430a52d63" containerID="9620b6847164f514b527752b87f5b72e3c95fcbb09de35b84b12676a662db63a" exitCode=0 Dec 11 22:02:53 crc kubenswrapper[4956]: I1211 22:02:52.890389 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7" event={"ID":"7a89c45b-e195-4012-a532-aa4430a52d63","Type":"ContainerDied","Data":"9620b6847164f514b527752b87f5b72e3c95fcbb09de35b84b12676a662db63a"} Dec 11 22:02:54 crc kubenswrapper[4956]: I1211 22:02:54.115251 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7" Dec 11 22:02:54 crc kubenswrapper[4956]: I1211 22:02:54.257117 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7a89c45b-e195-4012-a532-aa4430a52d63-bundle\") pod \"7a89c45b-e195-4012-a532-aa4430a52d63\" (UID: \"7a89c45b-e195-4012-a532-aa4430a52d63\") " Dec 11 22:02:54 crc kubenswrapper[4956]: I1211 22:02:54.257185 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7a89c45b-e195-4012-a532-aa4430a52d63-util\") pod \"7a89c45b-e195-4012-a532-aa4430a52d63\" (UID: \"7a89c45b-e195-4012-a532-aa4430a52d63\") " Dec 11 22:02:54 crc kubenswrapper[4956]: I1211 22:02:54.257223 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fr5qk\" (UniqueName: \"kubernetes.io/projected/7a89c45b-e195-4012-a532-aa4430a52d63-kube-api-access-fr5qk\") pod \"7a89c45b-e195-4012-a532-aa4430a52d63\" (UID: \"7a89c45b-e195-4012-a532-aa4430a52d63\") " Dec 11 22:02:54 crc kubenswrapper[4956]: I1211 22:02:54.259040 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a89c45b-e195-4012-a532-aa4430a52d63-bundle" (OuterVolumeSpecName: "bundle") pod "7a89c45b-e195-4012-a532-aa4430a52d63" (UID: "7a89c45b-e195-4012-a532-aa4430a52d63"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:02:54 crc kubenswrapper[4956]: I1211 22:02:54.272983 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a89c45b-e195-4012-a532-aa4430a52d63-kube-api-access-fr5qk" (OuterVolumeSpecName: "kube-api-access-fr5qk") pod "7a89c45b-e195-4012-a532-aa4430a52d63" (UID: "7a89c45b-e195-4012-a532-aa4430a52d63"). InnerVolumeSpecName "kube-api-access-fr5qk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:02:54 crc kubenswrapper[4956]: I1211 22:02:54.279697 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a89c45b-e195-4012-a532-aa4430a52d63-util" (OuterVolumeSpecName: "util") pod "7a89c45b-e195-4012-a532-aa4430a52d63" (UID: "7a89c45b-e195-4012-a532-aa4430a52d63"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:02:54 crc kubenswrapper[4956]: I1211 22:02:54.358589 4956 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7a89c45b-e195-4012-a532-aa4430a52d63-util\") on node \"crc\" DevicePath \"\"" Dec 11 22:02:54 crc kubenswrapper[4956]: I1211 22:02:54.358645 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fr5qk\" (UniqueName: \"kubernetes.io/projected/7a89c45b-e195-4012-a532-aa4430a52d63-kube-api-access-fr5qk\") on node \"crc\" DevicePath \"\"" Dec 11 22:02:54 crc kubenswrapper[4956]: I1211 22:02:54.358663 4956 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7a89c45b-e195-4012-a532-aa4430a52d63-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 22:02:54 crc kubenswrapper[4956]: I1211 22:02:54.904490 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7" event={"ID":"7a89c45b-e195-4012-a532-aa4430a52d63","Type":"ContainerDied","Data":"65c2954fcccb8fc9602e3d55dd3f24088911362d05df844938ba87d7567c8521"} Dec 11 22:02:54 crc kubenswrapper[4956]: I1211 22:02:54.904545 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65c2954fcccb8fc9602e3d55dd3f24088911362d05df844938ba87d7567c8521" Dec 11 22:02:54 crc kubenswrapper[4956]: I1211 22:02:54.904635 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7" Dec 11 22:02:55 crc kubenswrapper[4956]: I1211 22:02:55.846899 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vp7xn"] Dec 11 22:02:55 crc kubenswrapper[4956]: E1211 22:02:55.847852 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a89c45b-e195-4012-a532-aa4430a52d63" containerName="util" Dec 11 22:02:55 crc kubenswrapper[4956]: I1211 22:02:55.847962 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a89c45b-e195-4012-a532-aa4430a52d63" containerName="util" Dec 11 22:02:55 crc kubenswrapper[4956]: E1211 22:02:55.848039 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a89c45b-e195-4012-a532-aa4430a52d63" containerName="pull" Dec 11 22:02:55 crc kubenswrapper[4956]: I1211 22:02:55.848111 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a89c45b-e195-4012-a532-aa4430a52d63" containerName="pull" Dec 11 22:02:55 crc kubenswrapper[4956]: E1211 22:02:55.848196 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a89c45b-e195-4012-a532-aa4430a52d63" containerName="extract" Dec 11 22:02:55 crc kubenswrapper[4956]: I1211 22:02:55.848281 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a89c45b-e195-4012-a532-aa4430a52d63" containerName="extract" Dec 11 22:02:55 crc kubenswrapper[4956]: I1211 22:02:55.848504 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a89c45b-e195-4012-a532-aa4430a52d63" containerName="extract" Dec 11 22:02:55 crc kubenswrapper[4956]: I1211 22:02:55.849521 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vp7xn" Dec 11 22:02:55 crc kubenswrapper[4956]: I1211 22:02:55.864738 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vp7xn"] Dec 11 22:02:55 crc kubenswrapper[4956]: I1211 22:02:55.978413 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5ac45e0-2ddc-4544-b9c4-b033b169c25b-utilities\") pod \"redhat-marketplace-vp7xn\" (UID: \"b5ac45e0-2ddc-4544-b9c4-b033b169c25b\") " pod="openshift-marketplace/redhat-marketplace-vp7xn" Dec 11 22:02:55 crc kubenswrapper[4956]: I1211 22:02:55.978487 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lt77q\" (UniqueName: \"kubernetes.io/projected/b5ac45e0-2ddc-4544-b9c4-b033b169c25b-kube-api-access-lt77q\") pod \"redhat-marketplace-vp7xn\" (UID: \"b5ac45e0-2ddc-4544-b9c4-b033b169c25b\") " pod="openshift-marketplace/redhat-marketplace-vp7xn" Dec 11 22:02:55 crc kubenswrapper[4956]: I1211 22:02:55.978532 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5ac45e0-2ddc-4544-b9c4-b033b169c25b-catalog-content\") pod \"redhat-marketplace-vp7xn\" (UID: \"b5ac45e0-2ddc-4544-b9c4-b033b169c25b\") " pod="openshift-marketplace/redhat-marketplace-vp7xn" Dec 11 22:02:56 crc kubenswrapper[4956]: I1211 22:02:56.081018 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lt77q\" (UniqueName: \"kubernetes.io/projected/b5ac45e0-2ddc-4544-b9c4-b033b169c25b-kube-api-access-lt77q\") pod \"redhat-marketplace-vp7xn\" (UID: \"b5ac45e0-2ddc-4544-b9c4-b033b169c25b\") " pod="openshift-marketplace/redhat-marketplace-vp7xn" Dec 11 22:02:56 crc kubenswrapper[4956]: I1211 22:02:56.081561 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5ac45e0-2ddc-4544-b9c4-b033b169c25b-catalog-content\") pod \"redhat-marketplace-vp7xn\" (UID: \"b5ac45e0-2ddc-4544-b9c4-b033b169c25b\") " pod="openshift-marketplace/redhat-marketplace-vp7xn" Dec 11 22:02:56 crc kubenswrapper[4956]: I1211 22:02:56.082066 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5ac45e0-2ddc-4544-b9c4-b033b169c25b-utilities\") pod \"redhat-marketplace-vp7xn\" (UID: \"b5ac45e0-2ddc-4544-b9c4-b033b169c25b\") " pod="openshift-marketplace/redhat-marketplace-vp7xn" Dec 11 22:02:56 crc kubenswrapper[4956]: I1211 22:02:56.082455 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5ac45e0-2ddc-4544-b9c4-b033b169c25b-catalog-content\") pod \"redhat-marketplace-vp7xn\" (UID: \"b5ac45e0-2ddc-4544-b9c4-b033b169c25b\") " pod="openshift-marketplace/redhat-marketplace-vp7xn" Dec 11 22:02:56 crc kubenswrapper[4956]: I1211 22:02:56.082688 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5ac45e0-2ddc-4544-b9c4-b033b169c25b-utilities\") pod \"redhat-marketplace-vp7xn\" (UID: \"b5ac45e0-2ddc-4544-b9c4-b033b169c25b\") " pod="openshift-marketplace/redhat-marketplace-vp7xn" Dec 11 22:02:56 crc kubenswrapper[4956]: I1211 22:02:56.100363 4956 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-lt77q\" (UniqueName: \"kubernetes.io/projected/b5ac45e0-2ddc-4544-b9c4-b033b169c25b-kube-api-access-lt77q\") pod \"redhat-marketplace-vp7xn\" (UID: \"b5ac45e0-2ddc-4544-b9c4-b033b169c25b\") " pod="openshift-marketplace/redhat-marketplace-vp7xn" Dec 11 22:02:56 crc kubenswrapper[4956]: I1211 22:02:56.166408 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vp7xn" Dec 11 22:02:56 crc kubenswrapper[4956]: I1211 22:02:56.621430 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vp7xn"] Dec 11 22:02:56 crc kubenswrapper[4956]: I1211 22:02:56.918617 4956 generic.go:334] "Generic (PLEG): container finished" podID="b5ac45e0-2ddc-4544-b9c4-b033b169c25b" containerID="3d1b0104486b560a4a3f754d259df2a420de2b9b2f7531a025e92532e952457f" exitCode=0 Dec 11 22:02:56 crc kubenswrapper[4956]: I1211 22:02:56.918669 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vp7xn" event={"ID":"b5ac45e0-2ddc-4544-b9c4-b033b169c25b","Type":"ContainerDied","Data":"3d1b0104486b560a4a3f754d259df2a420de2b9b2f7531a025e92532e952457f"} Dec 11 22:02:56 crc kubenswrapper[4956]: I1211 22:02:56.918695 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vp7xn" event={"ID":"b5ac45e0-2ddc-4544-b9c4-b033b169c25b","Type":"ContainerStarted","Data":"a45db06be8374e2504f1db731aa114b9964bc59acd76d7aed029cdcd6f7d356a"} Dec 11 22:02:57 crc kubenswrapper[4956]: I1211 22:02:57.929434 4956 generic.go:334] "Generic (PLEG): container finished" podID="b5ac45e0-2ddc-4544-b9c4-b033b169c25b" containerID="8945d8163d8026d4956af2281fcd83e6e26f126929957d6c6e5bfaf8eb2eab7f" exitCode=0 Dec 11 22:02:57 crc kubenswrapper[4956]: I1211 22:02:57.929515 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vp7xn" event={"ID":"b5ac45e0-2ddc-4544-b9c4-b033b169c25b","Type":"ContainerDied","Data":"8945d8163d8026d4956af2281fcd83e6e26f126929957d6c6e5bfaf8eb2eab7f"} Dec 11 22:02:58 crc kubenswrapper[4956]: I1211 22:02:58.937628 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vp7xn" event={"ID":"b5ac45e0-2ddc-4544-b9c4-b033b169c25b","Type":"ContainerStarted","Data":"04ba3d07bcfc4e0e13bc821d476b9519d19430be28784847c9ef36295e95ac50"} Dec 11 22:02:58 crc kubenswrapper[4956]: I1211 22:02:58.954761 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vp7xn" podStartSLOduration=2.342983101 podStartE2EDuration="3.954745059s" podCreationTimestamp="2025-12-11 22:02:55 +0000 UTC" firstStartedPulling="2025-12-11 22:02:56.920715055 +0000 UTC m=+869.365093205" lastFinishedPulling="2025-12-11 22:02:58.532477013 +0000 UTC m=+870.976855163" observedRunningTime="2025-12-11 22:02:58.953681261 +0000 UTC m=+871.398059431" watchObservedRunningTime="2025-12-11 22:02:58.954745059 +0000 UTC m=+871.399123209" Dec 11 22:03:06 crc kubenswrapper[4956]: I1211 22:03:06.166928 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vp7xn" Dec 11 22:03:06 crc kubenswrapper[4956]: I1211 22:03:06.167595 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vp7xn" Dec 11 22:03:06 crc kubenswrapper[4956]: I1211 22:03:06.205019 4956 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vp7xn" Dec 11 22:03:07 crc kubenswrapper[4956]: I1211 22:03:07.038260 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vp7xn" Dec 11 22:03:07 crc kubenswrapper[4956]: I1211 22:03:07.054581 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-7b44544c9d-fw298"] Dec 11 22:03:07 crc kubenswrapper[4956]: I1211 22:03:07.056030 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7b44544c9d-fw298" Dec 11 22:03:07 crc kubenswrapper[4956]: I1211 22:03:07.064091 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-service-cert" Dec 11 22:03:07 crc kubenswrapper[4956]: I1211 22:03:07.064379 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-k7x9p" Dec 11 22:03:07 crc kubenswrapper[4956]: I1211 22:03:07.073934 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7b44544c9d-fw298"] Dec 11 22:03:07 crc kubenswrapper[4956]: I1211 22:03:07.236184 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2c91f172-3850-43c1-b558-d0c87f7e2797-apiservice-cert\") pod \"infra-operator-controller-manager-7b44544c9d-fw298\" (UID: \"2c91f172-3850-43c1-b558-d0c87f7e2797\") " pod="openstack-operators/infra-operator-controller-manager-7b44544c9d-fw298" Dec 11 22:03:07 crc kubenswrapper[4956]: I1211 22:03:07.236373 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnhhg\" (UniqueName: \"kubernetes.io/projected/2c91f172-3850-43c1-b558-d0c87f7e2797-kube-api-access-fnhhg\") pod \"infra-operator-controller-manager-7b44544c9d-fw298\" (UID: \"2c91f172-3850-43c1-b558-d0c87f7e2797\") " pod="openstack-operators/infra-operator-controller-manager-7b44544c9d-fw298" Dec 11 22:03:07 crc kubenswrapper[4956]: I1211 22:03:07.236534 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2c91f172-3850-43c1-b558-d0c87f7e2797-webhook-cert\") pod \"infra-operator-controller-manager-7b44544c9d-fw298\" (UID: \"2c91f172-3850-43c1-b558-d0c87f7e2797\") " pod="openstack-operators/infra-operator-controller-manager-7b44544c9d-fw298" Dec 11 22:03:07 crc kubenswrapper[4956]: I1211 22:03:07.338231 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnhhg\" (UniqueName: \"kubernetes.io/projected/2c91f172-3850-43c1-b558-d0c87f7e2797-kube-api-access-fnhhg\") pod \"infra-operator-controller-manager-7b44544c9d-fw298\" (UID: \"2c91f172-3850-43c1-b558-d0c87f7e2797\") " pod="openstack-operators/infra-operator-controller-manager-7b44544c9d-fw298" Dec 11 22:03:07 crc kubenswrapper[4956]: I1211 22:03:07.338313 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2c91f172-3850-43c1-b558-d0c87f7e2797-webhook-cert\") pod \"infra-operator-controller-manager-7b44544c9d-fw298\" (UID: \"2c91f172-3850-43c1-b558-d0c87f7e2797\") " 
pod="openstack-operators/infra-operator-controller-manager-7b44544c9d-fw298" Dec 11 22:03:07 crc kubenswrapper[4956]: I1211 22:03:07.338356 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2c91f172-3850-43c1-b558-d0c87f7e2797-apiservice-cert\") pod \"infra-operator-controller-manager-7b44544c9d-fw298\" (UID: \"2c91f172-3850-43c1-b558-d0c87f7e2797\") " pod="openstack-operators/infra-operator-controller-manager-7b44544c9d-fw298" Dec 11 22:03:07 crc kubenswrapper[4956]: I1211 22:03:07.343959 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2c91f172-3850-43c1-b558-d0c87f7e2797-webhook-cert\") pod \"infra-operator-controller-manager-7b44544c9d-fw298\" (UID: \"2c91f172-3850-43c1-b558-d0c87f7e2797\") " pod="openstack-operators/infra-operator-controller-manager-7b44544c9d-fw298" Dec 11 22:03:07 crc kubenswrapper[4956]: I1211 22:03:07.345002 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2c91f172-3850-43c1-b558-d0c87f7e2797-apiservice-cert\") pod \"infra-operator-controller-manager-7b44544c9d-fw298\" (UID: \"2c91f172-3850-43c1-b558-d0c87f7e2797\") " pod="openstack-operators/infra-operator-controller-manager-7b44544c9d-fw298" Dec 11 22:03:07 crc kubenswrapper[4956]: I1211 22:03:07.359624 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnhhg\" (UniqueName: \"kubernetes.io/projected/2c91f172-3850-43c1-b558-d0c87f7e2797-kube-api-access-fnhhg\") pod \"infra-operator-controller-manager-7b44544c9d-fw298\" (UID: \"2c91f172-3850-43c1-b558-d0c87f7e2797\") " pod="openstack-operators/infra-operator-controller-manager-7b44544c9d-fw298" Dec 11 22:03:07 crc kubenswrapper[4956]: I1211 22:03:07.382962 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7b44544c9d-fw298" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.181839 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7b44544c9d-fw298"] Dec 11 22:03:08 crc kubenswrapper[4956]: W1211 22:03:08.191834 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2c91f172_3850_43c1_b558_d0c87f7e2797.slice/crio-df9801e6ae55f9aa863566f073a6cea9430db3bcf3c3e80da735ae73e2944c6d WatchSource:0}: Error finding container df9801e6ae55f9aa863566f073a6cea9430db3bcf3c3e80da735ae73e2944c6d: Status 404 returned error can't find the container with id df9801e6ae55f9aa863566f073a6cea9430db3bcf3c3e80da735ae73e2944c6d Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.818565 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/openstack-galera-0"] Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.819707 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/openstack-galera-0" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.821714 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"openstack-config-data" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.822218 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"openstack-scripts" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.822428 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"galera-openstack-dockercfg-xdnqb" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.822501 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"openshift-service-ca.crt" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.822568 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"kube-root-ca.crt" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.842075 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/openstack-galera-0"] Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.848081 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/openstack-galera-1"] Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.849181 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-1" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.853650 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/openstack-galera-2"] Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.854947 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-2" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.855553 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"bddf5832-7ec1-4c44-a8ff-7c6eae681927\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.855618 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/bddf5832-7ec1-4c44-a8ff-7c6eae681927-config-data-default\") pod \"openstack-galera-0\" (UID: \"bddf5832-7ec1-4c44-a8ff-7c6eae681927\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.855653 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bddf5832-7ec1-4c44-a8ff-7c6eae681927-operator-scripts\") pod \"openstack-galera-0\" (UID: \"bddf5832-7ec1-4c44-a8ff-7c6eae681927\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.855692 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/bddf5832-7ec1-4c44-a8ff-7c6eae681927-config-data-generated\") pod \"openstack-galera-0\" (UID: \"bddf5832-7ec1-4c44-a8ff-7c6eae681927\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.855710 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-hnfc4\" (UniqueName: \"kubernetes.io/projected/bddf5832-7ec1-4c44-a8ff-7c6eae681927-kube-api-access-hnfc4\") pod \"openstack-galera-0\" (UID: \"bddf5832-7ec1-4c44-a8ff-7c6eae681927\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.855736 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bddf5832-7ec1-4c44-a8ff-7c6eae681927-kolla-config\") pod \"openstack-galera-0\" (UID: \"bddf5832-7ec1-4c44-a8ff-7c6eae681927\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.860672 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/openstack-galera-1"] Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.887699 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/openstack-galera-2"] Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.956332 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9e73ef6f-6323-44c0-9ae8-b14eda333297-config-data-generated\") pod \"openstack-galera-2\" (UID: \"9e73ef6f-6323-44c0-9ae8-b14eda333297\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.956392 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-2\" (UID: \"9e73ef6f-6323-44c0-9ae8-b14eda333297\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.956423 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/bddf5832-7ec1-4c44-a8ff-7c6eae681927-config-data-default\") pod \"openstack-galera-0\" (UID: \"bddf5832-7ec1-4c44-a8ff-7c6eae681927\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.956446 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9e73ef6f-6323-44c0-9ae8-b14eda333297-operator-scripts\") pod \"openstack-galera-2\" (UID: \"9e73ef6f-6323-44c0-9ae8-b14eda333297\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.956466 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/94805819-605d-47fe-9670-957c387a50fb-config-data-default\") pod \"openstack-galera-1\" (UID: \"94805819-605d-47fe-9670-957c387a50fb\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.956491 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9e73ef6f-6323-44c0-9ae8-b14eda333297-kolla-config\") pod \"openstack-galera-2\" (UID: \"9e73ef6f-6323-44c0-9ae8-b14eda333297\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.956517 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/bddf5832-7ec1-4c44-a8ff-7c6eae681927-operator-scripts\") pod \"openstack-galera-0\" (UID: \"bddf5832-7ec1-4c44-a8ff-7c6eae681927\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.956545 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7vkx\" (UniqueName: \"kubernetes.io/projected/9e73ef6f-6323-44c0-9ae8-b14eda333297-kube-api-access-r7vkx\") pod \"openstack-galera-2\" (UID: \"9e73ef6f-6323-44c0-9ae8-b14eda333297\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.956597 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/bddf5832-7ec1-4c44-a8ff-7c6eae681927-config-data-generated\") pod \"openstack-galera-0\" (UID: \"bddf5832-7ec1-4c44-a8ff-7c6eae681927\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.956623 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnfc4\" (UniqueName: \"kubernetes.io/projected/bddf5832-7ec1-4c44-a8ff-7c6eae681927-kube-api-access-hnfc4\") pod \"openstack-galera-0\" (UID: \"bddf5832-7ec1-4c44-a8ff-7c6eae681927\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.956646 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-756hv\" (UniqueName: \"kubernetes.io/projected/94805819-605d-47fe-9670-957c387a50fb-kube-api-access-756hv\") pod \"openstack-galera-1\" (UID: \"94805819-605d-47fe-9670-957c387a50fb\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.956670 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bddf5832-7ec1-4c44-a8ff-7c6eae681927-kolla-config\") pod \"openstack-galera-0\" (UID: \"bddf5832-7ec1-4c44-a8ff-7c6eae681927\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.956688 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9e73ef6f-6323-44c0-9ae8-b14eda333297-config-data-default\") pod \"openstack-galera-2\" (UID: \"9e73ef6f-6323-44c0-9ae8-b14eda333297\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.956718 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/94805819-605d-47fe-9670-957c387a50fb-operator-scripts\") pod \"openstack-galera-1\" (UID: \"94805819-605d-47fe-9670-957c387a50fb\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.956739 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/94805819-605d-47fe-9670-957c387a50fb-kolla-config\") pod \"openstack-galera-1\" (UID: \"94805819-605d-47fe-9670-957c387a50fb\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.956761 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/94805819-605d-47fe-9670-957c387a50fb-config-data-generated\") pod \"openstack-galera-1\" (UID: \"94805819-605d-47fe-9670-957c387a50fb\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.956805 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"bddf5832-7ec1-4c44-a8ff-7c6eae681927\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.956836 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-galera-1\" (UID: \"94805819-605d-47fe-9670-957c387a50fb\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.957555 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/bddf5832-7ec1-4c44-a8ff-7c6eae681927-config-data-generated\") pod \"openstack-galera-0\" (UID: \"bddf5832-7ec1-4c44-a8ff-7c6eae681927\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.957900 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bddf5832-7ec1-4c44-a8ff-7c6eae681927-kolla-config\") pod \"openstack-galera-0\" (UID: \"bddf5832-7ec1-4c44-a8ff-7c6eae681927\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.958143 4956 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"bddf5832-7ec1-4c44-a8ff-7c6eae681927\") device mount path \"/mnt/openstack/pv11\"" pod="swift-kuttl-tests/openstack-galera-0" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.958931 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/bddf5832-7ec1-4c44-a8ff-7c6eae681927-config-data-default\") pod \"openstack-galera-0\" (UID: \"bddf5832-7ec1-4c44-a8ff-7c6eae681927\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.966659 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bddf5832-7ec1-4c44-a8ff-7c6eae681927-operator-scripts\") pod \"openstack-galera-0\" (UID: \"bddf5832-7ec1-4c44-a8ff-7c6eae681927\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.973531 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnfc4\" (UniqueName: \"kubernetes.io/projected/bddf5832-7ec1-4c44-a8ff-7c6eae681927-kube-api-access-hnfc4\") pod \"openstack-galera-0\" (UID: \"bddf5832-7ec1-4c44-a8ff-7c6eae681927\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 11 22:03:08 crc kubenswrapper[4956]: I1211 22:03:08.974245 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"bddf5832-7ec1-4c44-a8ff-7c6eae681927\") " 
pod="swift-kuttl-tests/openstack-galera-0" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.057729 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-2\" (UID: \"9e73ef6f-6323-44c0-9ae8-b14eda333297\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.057804 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9e73ef6f-6323-44c0-9ae8-b14eda333297-operator-scripts\") pod \"openstack-galera-2\" (UID: \"9e73ef6f-6323-44c0-9ae8-b14eda333297\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.057830 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/94805819-605d-47fe-9670-957c387a50fb-config-data-default\") pod \"openstack-galera-1\" (UID: \"94805819-605d-47fe-9670-957c387a50fb\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.057864 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9e73ef6f-6323-44c0-9ae8-b14eda333297-kolla-config\") pod \"openstack-galera-2\" (UID: \"9e73ef6f-6323-44c0-9ae8-b14eda333297\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.058115 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7vkx\" (UniqueName: \"kubernetes.io/projected/9e73ef6f-6323-44c0-9ae8-b14eda333297-kube-api-access-r7vkx\") pod \"openstack-galera-2\" (UID: \"9e73ef6f-6323-44c0-9ae8-b14eda333297\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.058178 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-756hv\" (UniqueName: \"kubernetes.io/projected/94805819-605d-47fe-9670-957c387a50fb-kube-api-access-756hv\") pod \"openstack-galera-1\" (UID: \"94805819-605d-47fe-9670-957c387a50fb\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.058210 4956 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-2\" (UID: \"9e73ef6f-6323-44c0-9ae8-b14eda333297\") device mount path \"/mnt/openstack/pv02\"" pod="swift-kuttl-tests/openstack-galera-2" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.058861 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9e73ef6f-6323-44c0-9ae8-b14eda333297-config-data-default\") pod \"openstack-galera-2\" (UID: \"9e73ef6f-6323-44c0-9ae8-b14eda333297\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.058940 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/94805819-605d-47fe-9670-957c387a50fb-operator-scripts\") pod \"openstack-galera-1\" (UID: \"94805819-605d-47fe-9670-957c387a50fb\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.058985 4956 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/94805819-605d-47fe-9670-957c387a50fb-kolla-config\") pod \"openstack-galera-1\" (UID: \"94805819-605d-47fe-9670-957c387a50fb\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.059042 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/94805819-605d-47fe-9670-957c387a50fb-config-data-generated\") pod \"openstack-galera-1\" (UID: \"94805819-605d-47fe-9670-957c387a50fb\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.059084 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-galera-1\" (UID: \"94805819-605d-47fe-9670-957c387a50fb\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.059120 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/94805819-605d-47fe-9670-957c387a50fb-config-data-default\") pod \"openstack-galera-1\" (UID: \"94805819-605d-47fe-9670-957c387a50fb\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.059128 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9e73ef6f-6323-44c0-9ae8-b14eda333297-config-data-generated\") pod \"openstack-galera-2\" (UID: \"9e73ef6f-6323-44c0-9ae8-b14eda333297\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.059298 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9e73ef6f-6323-44c0-9ae8-b14eda333297-kolla-config\") pod \"openstack-galera-2\" (UID: \"9e73ef6f-6323-44c0-9ae8-b14eda333297\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.059383 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/94805819-605d-47fe-9670-957c387a50fb-config-data-generated\") pod \"openstack-galera-1\" (UID: \"94805819-605d-47fe-9670-957c387a50fb\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.059425 4956 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-galera-1\" (UID: \"94805819-605d-47fe-9670-957c387a50fb\") device mount path \"/mnt/openstack/pv05\"" pod="swift-kuttl-tests/openstack-galera-1" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.059452 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9e73ef6f-6323-44c0-9ae8-b14eda333297-config-data-generated\") pod \"openstack-galera-2\" (UID: \"9e73ef6f-6323-44c0-9ae8-b14eda333297\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.060186 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/94805819-605d-47fe-9670-957c387a50fb-kolla-config\") pod \"openstack-galera-1\" (UID: \"94805819-605d-47fe-9670-957c387a50fb\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.060371 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9e73ef6f-6323-44c0-9ae8-b14eda333297-config-data-default\") pod \"openstack-galera-2\" (UID: \"9e73ef6f-6323-44c0-9ae8-b14eda333297\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.060961 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/94805819-605d-47fe-9670-957c387a50fb-operator-scripts\") pod \"openstack-galera-1\" (UID: \"94805819-605d-47fe-9670-957c387a50fb\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.062112 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9e73ef6f-6323-44c0-9ae8-b14eda333297-operator-scripts\") pod \"openstack-galera-2\" (UID: \"9e73ef6f-6323-44c0-9ae8-b14eda333297\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.075304 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-2\" (UID: \"9e73ef6f-6323-44c0-9ae8-b14eda333297\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.077018 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-galera-1\" (UID: \"94805819-605d-47fe-9670-957c387a50fb\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.082145 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7vkx\" (UniqueName: \"kubernetes.io/projected/9e73ef6f-6323-44c0-9ae8-b14eda333297-kube-api-access-r7vkx\") pod \"openstack-galera-2\" (UID: \"9e73ef6f-6323-44c0-9ae8-b14eda333297\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.087930 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-756hv\" (UniqueName: \"kubernetes.io/projected/94805819-605d-47fe-9670-957c387a50fb-kube-api-access-756hv\") pod \"openstack-galera-1\" (UID: \"94805819-605d-47fe-9670-957c387a50fb\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.118152 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7b44544c9d-fw298" event={"ID":"2c91f172-3850-43c1-b558-d0c87f7e2797","Type":"ContainerStarted","Data":"df9801e6ae55f9aa863566f073a6cea9430db3bcf3c3e80da735ae73e2944c6d"} Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.141019 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-0" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.166517 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/openstack-galera-1" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.177731 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-2" Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.898809 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/openstack-galera-0"] Dec 11 22:03:09 crc kubenswrapper[4956]: W1211 22:03:09.903145 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbddf5832_7ec1_4c44_a8ff_7c6eae681927.slice/crio-b084c818afdf247431b54a7c7966b7c89891f14ec6c3db70c68d38dc49e89822 WatchSource:0}: Error finding container b084c818afdf247431b54a7c7966b7c89891f14ec6c3db70c68d38dc49e89822: Status 404 returned error can't find the container with id b084c818afdf247431b54a7c7966b7c89891f14ec6c3db70c68d38dc49e89822 Dec 11 22:03:09 crc kubenswrapper[4956]: I1211 22:03:09.917398 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/openstack-galera-1"] Dec 11 22:03:10 crc kubenswrapper[4956]: I1211 22:03:10.124434 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-0" event={"ID":"bddf5832-7ec1-4c44-a8ff-7c6eae681927","Type":"ContainerStarted","Data":"b084c818afdf247431b54a7c7966b7c89891f14ec6c3db70c68d38dc49e89822"} Dec 11 22:03:10 crc kubenswrapper[4956]: I1211 22:03:10.126699 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-1" event={"ID":"94805819-605d-47fe-9670-957c387a50fb","Type":"ContainerStarted","Data":"abdcb0fd9f569de4020d6fcf5525d4777dde2c3fac5ddbefff72dbca286c515c"} Dec 11 22:03:10 crc kubenswrapper[4956]: I1211 22:03:10.166254 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/openstack-galera-2"] Dec 11 22:03:10 crc kubenswrapper[4956]: W1211 22:03:10.177022 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e73ef6f_6323_44c0_9ae8_b14eda333297.slice/crio-94b1474f793aa56e3dd14697421e5672caed7984f7e074eaf86e7c1c9d5f45b3 WatchSource:0}: Error finding container 94b1474f793aa56e3dd14697421e5672caed7984f7e074eaf86e7c1c9d5f45b3: Status 404 returned error can't find the container with id 94b1474f793aa56e3dd14697421e5672caed7984f7e074eaf86e7c1c9d5f45b3 Dec 11 22:03:10 crc kubenswrapper[4956]: I1211 22:03:10.836998 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vp7xn"] Dec 11 22:03:10 crc kubenswrapper[4956]: I1211 22:03:10.837235 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vp7xn" podUID="b5ac45e0-2ddc-4544-b9c4-b033b169c25b" containerName="registry-server" containerID="cri-o://04ba3d07bcfc4e0e13bc821d476b9519d19430be28784847c9ef36295e95ac50" gracePeriod=2 Dec 11 22:03:11 crc kubenswrapper[4956]: I1211 22:03:11.133056 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-2" event={"ID":"9e73ef6f-6323-44c0-9ae8-b14eda333297","Type":"ContainerStarted","Data":"94b1474f793aa56e3dd14697421e5672caed7984f7e074eaf86e7c1c9d5f45b3"} Dec 11 22:03:11 crc kubenswrapper[4956]: I1211 22:03:11.135015 4956 generic.go:334] "Generic (PLEG): container finished" podID="b5ac45e0-2ddc-4544-b9c4-b033b169c25b" containerID="04ba3d07bcfc4e0e13bc821d476b9519d19430be28784847c9ef36295e95ac50" exitCode=0 
Dec 11 22:03:11 crc kubenswrapper[4956]: I1211 22:03:11.135041 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vp7xn" event={"ID":"b5ac45e0-2ddc-4544-b9c4-b033b169c25b","Type":"ContainerDied","Data":"04ba3d07bcfc4e0e13bc821d476b9519d19430be28784847c9ef36295e95ac50"} Dec 11 22:03:11 crc kubenswrapper[4956]: I1211 22:03:11.639488 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vp7xn" Dec 11 22:03:11 crc kubenswrapper[4956]: I1211 22:03:11.764343 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5ac45e0-2ddc-4544-b9c4-b033b169c25b-utilities\") pod \"b5ac45e0-2ddc-4544-b9c4-b033b169c25b\" (UID: \"b5ac45e0-2ddc-4544-b9c4-b033b169c25b\") " Dec 11 22:03:11 crc kubenswrapper[4956]: I1211 22:03:11.764926 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5ac45e0-2ddc-4544-b9c4-b033b169c25b-catalog-content\") pod \"b5ac45e0-2ddc-4544-b9c4-b033b169c25b\" (UID: \"b5ac45e0-2ddc-4544-b9c4-b033b169c25b\") " Dec 11 22:03:11 crc kubenswrapper[4956]: I1211 22:03:11.765015 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lt77q\" (UniqueName: \"kubernetes.io/projected/b5ac45e0-2ddc-4544-b9c4-b033b169c25b-kube-api-access-lt77q\") pod \"b5ac45e0-2ddc-4544-b9c4-b033b169c25b\" (UID: \"b5ac45e0-2ddc-4544-b9c4-b033b169c25b\") " Dec 11 22:03:11 crc kubenswrapper[4956]: I1211 22:03:11.765286 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5ac45e0-2ddc-4544-b9c4-b033b169c25b-utilities" (OuterVolumeSpecName: "utilities") pod "b5ac45e0-2ddc-4544-b9c4-b033b169c25b" (UID: "b5ac45e0-2ddc-4544-b9c4-b033b169c25b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:03:11 crc kubenswrapper[4956]: I1211 22:03:11.765426 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5ac45e0-2ddc-4544-b9c4-b033b169c25b-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 22:03:11 crc kubenswrapper[4956]: I1211 22:03:11.785103 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5ac45e0-2ddc-4544-b9c4-b033b169c25b-kube-api-access-lt77q" (OuterVolumeSpecName: "kube-api-access-lt77q") pod "b5ac45e0-2ddc-4544-b9c4-b033b169c25b" (UID: "b5ac45e0-2ddc-4544-b9c4-b033b169c25b"). InnerVolumeSpecName "kube-api-access-lt77q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:03:11 crc kubenswrapper[4956]: I1211 22:03:11.788230 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5ac45e0-2ddc-4544-b9c4-b033b169c25b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b5ac45e0-2ddc-4544-b9c4-b033b169c25b" (UID: "b5ac45e0-2ddc-4544-b9c4-b033b169c25b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:03:11 crc kubenswrapper[4956]: I1211 22:03:11.866694 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5ac45e0-2ddc-4544-b9c4-b033b169c25b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 22:03:11 crc kubenswrapper[4956]: I1211 22:03:11.866729 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lt77q\" (UniqueName: \"kubernetes.io/projected/b5ac45e0-2ddc-4544-b9c4-b033b169c25b-kube-api-access-lt77q\") on node \"crc\" DevicePath \"\"" Dec 11 22:03:12 crc kubenswrapper[4956]: I1211 22:03:12.192901 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7b44544c9d-fw298" event={"ID":"2c91f172-3850-43c1-b558-d0c87f7e2797","Type":"ContainerStarted","Data":"40bd658e23663ca33a88a793c87a36fb8144b2f0116f58671bba030895d03f0c"} Dec 11 22:03:12 crc kubenswrapper[4956]: I1211 22:03:12.193097 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-7b44544c9d-fw298" Dec 11 22:03:12 crc kubenswrapper[4956]: I1211 22:03:12.201524 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vp7xn" event={"ID":"b5ac45e0-2ddc-4544-b9c4-b033b169c25b","Type":"ContainerDied","Data":"a45db06be8374e2504f1db731aa114b9964bc59acd76d7aed029cdcd6f7d356a"} Dec 11 22:03:12 crc kubenswrapper[4956]: I1211 22:03:12.201573 4956 scope.go:117] "RemoveContainer" containerID="04ba3d07bcfc4e0e13bc821d476b9519d19430be28784847c9ef36295e95ac50" Dec 11 22:03:12 crc kubenswrapper[4956]: I1211 22:03:12.201685 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vp7xn" Dec 11 22:03:12 crc kubenswrapper[4956]: I1211 22:03:12.219729 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-7b44544c9d-fw298" podStartSLOduration=2.039166834 podStartE2EDuration="5.219707117s" podCreationTimestamp="2025-12-11 22:03:07 +0000 UTC" firstStartedPulling="2025-12-11 22:03:08.196209894 +0000 UTC m=+880.640588034" lastFinishedPulling="2025-12-11 22:03:11.376750167 +0000 UTC m=+883.821128317" observedRunningTime="2025-12-11 22:03:12.214245139 +0000 UTC m=+884.658623289" watchObservedRunningTime="2025-12-11 22:03:12.219707117 +0000 UTC m=+884.664085277" Dec 11 22:03:12 crc kubenswrapper[4956]: I1211 22:03:12.234259 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vp7xn"] Dec 11 22:03:12 crc kubenswrapper[4956]: I1211 22:03:12.245026 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vp7xn"] Dec 11 22:03:12 crc kubenswrapper[4956]: I1211 22:03:12.248958 4956 scope.go:117] "RemoveContainer" containerID="8945d8163d8026d4956af2281fcd83e6e26f126929957d6c6e5bfaf8eb2eab7f" Dec 11 22:03:12 crc kubenswrapper[4956]: I1211 22:03:12.270150 4956 scope.go:117] "RemoveContainer" containerID="3d1b0104486b560a4a3f754d259df2a420de2b9b2f7531a025e92532e952457f" Dec 11 22:03:13 crc kubenswrapper[4956]: I1211 22:03:13.103079 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rhjjc"] Dec 11 22:03:13 crc kubenswrapper[4956]: E1211 22:03:13.103638 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5ac45e0-2ddc-4544-b9c4-b033b169c25b" containerName="extract-utilities" Dec 11 22:03:13 crc kubenswrapper[4956]: I1211 22:03:13.103712 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5ac45e0-2ddc-4544-b9c4-b033b169c25b" containerName="extract-utilities" Dec 11 22:03:13 crc kubenswrapper[4956]: E1211 22:03:13.103794 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5ac45e0-2ddc-4544-b9c4-b033b169c25b" containerName="extract-content" Dec 11 22:03:13 crc kubenswrapper[4956]: I1211 22:03:13.103849 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5ac45e0-2ddc-4544-b9c4-b033b169c25b" containerName="extract-content" Dec 11 22:03:13 crc kubenswrapper[4956]: E1211 22:03:13.103901 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5ac45e0-2ddc-4544-b9c4-b033b169c25b" containerName="registry-server" Dec 11 22:03:13 crc kubenswrapper[4956]: I1211 22:03:13.103948 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5ac45e0-2ddc-4544-b9c4-b033b169c25b" containerName="registry-server" Dec 11 22:03:13 crc kubenswrapper[4956]: I1211 22:03:13.104160 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5ac45e0-2ddc-4544-b9c4-b033b169c25b" containerName="registry-server" Dec 11 22:03:13 crc kubenswrapper[4956]: I1211 22:03:13.106356 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rhjjc" Dec 11 22:03:13 crc kubenswrapper[4956]: I1211 22:03:13.124229 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rhjjc"] Dec 11 22:03:13 crc kubenswrapper[4956]: I1211 22:03:13.261413 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1df7ab6-bddf-44db-9eec-a2a9f238a068-utilities\") pod \"certified-operators-rhjjc\" (UID: \"d1df7ab6-bddf-44db-9eec-a2a9f238a068\") " pod="openshift-marketplace/certified-operators-rhjjc" Dec 11 22:03:13 crc kubenswrapper[4956]: I1211 22:03:13.261468 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1df7ab6-bddf-44db-9eec-a2a9f238a068-catalog-content\") pod \"certified-operators-rhjjc\" (UID: \"d1df7ab6-bddf-44db-9eec-a2a9f238a068\") " pod="openshift-marketplace/certified-operators-rhjjc" Dec 11 22:03:13 crc kubenswrapper[4956]: I1211 22:03:13.261551 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dr9hq\" (UniqueName: \"kubernetes.io/projected/d1df7ab6-bddf-44db-9eec-a2a9f238a068-kube-api-access-dr9hq\") pod \"certified-operators-rhjjc\" (UID: \"d1df7ab6-bddf-44db-9eec-a2a9f238a068\") " pod="openshift-marketplace/certified-operators-rhjjc" Dec 11 22:03:13 crc kubenswrapper[4956]: I1211 22:03:13.362423 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1df7ab6-bddf-44db-9eec-a2a9f238a068-utilities\") pod \"certified-operators-rhjjc\" (UID: \"d1df7ab6-bddf-44db-9eec-a2a9f238a068\") " pod="openshift-marketplace/certified-operators-rhjjc" Dec 11 22:03:13 crc kubenswrapper[4956]: I1211 22:03:13.362479 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1df7ab6-bddf-44db-9eec-a2a9f238a068-catalog-content\") pod \"certified-operators-rhjjc\" (UID: \"d1df7ab6-bddf-44db-9eec-a2a9f238a068\") " pod="openshift-marketplace/certified-operators-rhjjc" Dec 11 22:03:13 crc kubenswrapper[4956]: I1211 22:03:13.362559 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dr9hq\" (UniqueName: \"kubernetes.io/projected/d1df7ab6-bddf-44db-9eec-a2a9f238a068-kube-api-access-dr9hq\") pod \"certified-operators-rhjjc\" (UID: \"d1df7ab6-bddf-44db-9eec-a2a9f238a068\") " pod="openshift-marketplace/certified-operators-rhjjc" Dec 11 22:03:13 crc kubenswrapper[4956]: I1211 22:03:13.363433 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1df7ab6-bddf-44db-9eec-a2a9f238a068-utilities\") pod \"certified-operators-rhjjc\" (UID: \"d1df7ab6-bddf-44db-9eec-a2a9f238a068\") " pod="openshift-marketplace/certified-operators-rhjjc" Dec 11 22:03:13 crc kubenswrapper[4956]: I1211 22:03:13.364088 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1df7ab6-bddf-44db-9eec-a2a9f238a068-catalog-content\") pod \"certified-operators-rhjjc\" (UID: \"d1df7ab6-bddf-44db-9eec-a2a9f238a068\") " pod="openshift-marketplace/certified-operators-rhjjc" Dec 11 22:03:13 crc kubenswrapper[4956]: I1211 22:03:13.384574 4956 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-dr9hq\" (UniqueName: \"kubernetes.io/projected/d1df7ab6-bddf-44db-9eec-a2a9f238a068-kube-api-access-dr9hq\") pod \"certified-operators-rhjjc\" (UID: \"d1df7ab6-bddf-44db-9eec-a2a9f238a068\") " pod="openshift-marketplace/certified-operators-rhjjc" Dec 11 22:03:13 crc kubenswrapper[4956]: I1211 22:03:13.453573 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rhjjc" Dec 11 22:03:14 crc kubenswrapper[4956]: I1211 22:03:14.035264 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5ac45e0-2ddc-4544-b9c4-b033b169c25b" path="/var/lib/kubelet/pods/b5ac45e0-2ddc-4544-b9c4-b033b169c25b/volumes" Dec 11 22:03:14 crc kubenswrapper[4956]: I1211 22:03:14.064343 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rhjjc"] Dec 11 22:03:14 crc kubenswrapper[4956]: I1211 22:03:14.260349 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rhjjc" event={"ID":"d1df7ab6-bddf-44db-9eec-a2a9f238a068","Type":"ContainerStarted","Data":"fe391fc3c7d820e84ce0e2cef4f95fee51a8c9c89a3cd70c2e215503fc3f854c"} Dec 11 22:03:15 crc kubenswrapper[4956]: I1211 22:03:15.268240 4956 generic.go:334] "Generic (PLEG): container finished" podID="d1df7ab6-bddf-44db-9eec-a2a9f238a068" containerID="3b5833eb328c5610a6393c184dcf9daca8f4f266b37b2b74c1eaba76ea8492b4" exitCode=0 Dec 11 22:03:15 crc kubenswrapper[4956]: I1211 22:03:15.268335 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rhjjc" event={"ID":"d1df7ab6-bddf-44db-9eec-a2a9f238a068","Type":"ContainerDied","Data":"3b5833eb328c5610a6393c184dcf9daca8f4f266b37b2b74c1eaba76ea8492b4"} Dec 11 22:03:17 crc kubenswrapper[4956]: I1211 22:03:17.395148 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-7b44544c9d-fw298" Dec 11 22:03:20 crc kubenswrapper[4956]: I1211 22:03:20.602361 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/memcached-0"] Dec 11 22:03:20 crc kubenswrapper[4956]: I1211 22:03:20.603655 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/memcached-0" Dec 11 22:03:20 crc kubenswrapper[4956]: I1211 22:03:20.605888 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"memcached-memcached-dockercfg-qw69v" Dec 11 22:03:20 crc kubenswrapper[4956]: I1211 22:03:20.606094 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"memcached-config-data" Dec 11 22:03:20 crc kubenswrapper[4956]: I1211 22:03:20.607252 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/memcached-0"] Dec 11 22:03:20 crc kubenswrapper[4956]: I1211 22:03:20.690177 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c52pv\" (UniqueName: \"kubernetes.io/projected/ebf3438f-f633-4343-afbb-fab7515a880f-kube-api-access-c52pv\") pod \"memcached-0\" (UID: \"ebf3438f-f633-4343-afbb-fab7515a880f\") " pod="swift-kuttl-tests/memcached-0" Dec 11 22:03:20 crc kubenswrapper[4956]: I1211 22:03:20.690247 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ebf3438f-f633-4343-afbb-fab7515a880f-kolla-config\") pod \"memcached-0\" (UID: \"ebf3438f-f633-4343-afbb-fab7515a880f\") " pod="swift-kuttl-tests/memcached-0" Dec 11 22:03:20 crc kubenswrapper[4956]: I1211 22:03:20.690335 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ebf3438f-f633-4343-afbb-fab7515a880f-config-data\") pod \"memcached-0\" (UID: \"ebf3438f-f633-4343-afbb-fab7515a880f\") " pod="swift-kuttl-tests/memcached-0" Dec 11 22:03:20 crc kubenswrapper[4956]: I1211 22:03:20.791936 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c52pv\" (UniqueName: \"kubernetes.io/projected/ebf3438f-f633-4343-afbb-fab7515a880f-kube-api-access-c52pv\") pod \"memcached-0\" (UID: \"ebf3438f-f633-4343-afbb-fab7515a880f\") " pod="swift-kuttl-tests/memcached-0" Dec 11 22:03:20 crc kubenswrapper[4956]: I1211 22:03:20.793078 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ebf3438f-f633-4343-afbb-fab7515a880f-kolla-config\") pod \"memcached-0\" (UID: \"ebf3438f-f633-4343-afbb-fab7515a880f\") " pod="swift-kuttl-tests/memcached-0" Dec 11 22:03:20 crc kubenswrapper[4956]: I1211 22:03:20.793266 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ebf3438f-f633-4343-afbb-fab7515a880f-config-data\") pod \"memcached-0\" (UID: \"ebf3438f-f633-4343-afbb-fab7515a880f\") " pod="swift-kuttl-tests/memcached-0" Dec 11 22:03:20 crc kubenswrapper[4956]: I1211 22:03:20.796904 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ebf3438f-f633-4343-afbb-fab7515a880f-kolla-config\") pod \"memcached-0\" (UID: \"ebf3438f-f633-4343-afbb-fab7515a880f\") " pod="swift-kuttl-tests/memcached-0" Dec 11 22:03:20 crc kubenswrapper[4956]: I1211 22:03:20.797212 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ebf3438f-f633-4343-afbb-fab7515a880f-config-data\") pod \"memcached-0\" (UID: \"ebf3438f-f633-4343-afbb-fab7515a880f\") " pod="swift-kuttl-tests/memcached-0" Dec 11 22:03:20 crc 
kubenswrapper[4956]: I1211 22:03:20.809072 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c52pv\" (UniqueName: \"kubernetes.io/projected/ebf3438f-f633-4343-afbb-fab7515a880f-kube-api-access-c52pv\") pod \"memcached-0\" (UID: \"ebf3438f-f633-4343-afbb-fab7515a880f\") " pod="swift-kuttl-tests/memcached-0" Dec 11 22:03:20 crc kubenswrapper[4956]: I1211 22:03:20.925963 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/memcached-0" Dec 11 22:03:24 crc kubenswrapper[4956]: I1211 22:03:24.458317 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-nrbcn"] Dec 11 22:03:24 crc kubenswrapper[4956]: I1211 22:03:24.478324 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-nrbcn" Dec 11 22:03:24 crc kubenswrapper[4956]: I1211 22:03:24.488013 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-index-dockercfg-gh9l9" Dec 11 22:03:24 crc kubenswrapper[4956]: I1211 22:03:24.519328 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6c47\" (UniqueName: \"kubernetes.io/projected/c8f134b9-199f-44f5-b6d4-2fd3b7766db7-kube-api-access-f6c47\") pod \"rabbitmq-cluster-operator-index-nrbcn\" (UID: \"c8f134b9-199f-44f5-b6d4-2fd3b7766db7\") " pod="openstack-operators/rabbitmq-cluster-operator-index-nrbcn" Dec 11 22:03:24 crc kubenswrapper[4956]: I1211 22:03:24.520792 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-nrbcn"] Dec 11 22:03:24 crc kubenswrapper[4956]: I1211 22:03:24.620580 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6c47\" (UniqueName: \"kubernetes.io/projected/c8f134b9-199f-44f5-b6d4-2fd3b7766db7-kube-api-access-f6c47\") pod \"rabbitmq-cluster-operator-index-nrbcn\" (UID: \"c8f134b9-199f-44f5-b6d4-2fd3b7766db7\") " pod="openstack-operators/rabbitmq-cluster-operator-index-nrbcn" Dec 11 22:03:24 crc kubenswrapper[4956]: I1211 22:03:24.642736 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/memcached-0"] Dec 11 22:03:24 crc kubenswrapper[4956]: W1211 22:03:24.649907 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podebf3438f_f633_4343_afbb_fab7515a880f.slice/crio-82f9894ec2d882fe97b6cc32d1dc331c5d09f42298bd892a17f6cebcf1abdfaa WatchSource:0}: Error finding container 82f9894ec2d882fe97b6cc32d1dc331c5d09f42298bd892a17f6cebcf1abdfaa: Status 404 returned error can't find the container with id 82f9894ec2d882fe97b6cc32d1dc331c5d09f42298bd892a17f6cebcf1abdfaa Dec 11 22:03:24 crc kubenswrapper[4956]: I1211 22:03:24.651910 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6c47\" (UniqueName: \"kubernetes.io/projected/c8f134b9-199f-44f5-b6d4-2fd3b7766db7-kube-api-access-f6c47\") pod \"rabbitmq-cluster-operator-index-nrbcn\" (UID: \"c8f134b9-199f-44f5-b6d4-2fd3b7766db7\") " pod="openstack-operators/rabbitmq-cluster-operator-index-nrbcn" Dec 11 22:03:24 crc kubenswrapper[4956]: I1211 22:03:24.816074 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-nrbcn" Dec 11 22:03:25 crc kubenswrapper[4956]: I1211 22:03:25.358793 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/memcached-0" event={"ID":"ebf3438f-f633-4343-afbb-fab7515a880f","Type":"ContainerStarted","Data":"82f9894ec2d882fe97b6cc32d1dc331c5d09f42298bd892a17f6cebcf1abdfaa"} Dec 11 22:03:25 crc kubenswrapper[4956]: I1211 22:03:25.360223 4956 generic.go:334] "Generic (PLEG): container finished" podID="d1df7ab6-bddf-44db-9eec-a2a9f238a068" containerID="820194886d05732d6658bbebb450aab370c42c874153ff6b59db87e96836b712" exitCode=0 Dec 11 22:03:25 crc kubenswrapper[4956]: I1211 22:03:25.360270 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rhjjc" event={"ID":"d1df7ab6-bddf-44db-9eec-a2a9f238a068","Type":"ContainerDied","Data":"820194886d05732d6658bbebb450aab370c42c874153ff6b59db87e96836b712"} Dec 11 22:03:25 crc kubenswrapper[4956]: I1211 22:03:25.362953 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-1" event={"ID":"94805819-605d-47fe-9670-957c387a50fb","Type":"ContainerStarted","Data":"61ff72f5dca2df719b4a547030afbc8ff85b2285ae2ff48e647ce8714e5bc97d"} Dec 11 22:03:25 crc kubenswrapper[4956]: I1211 22:03:25.367040 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-0" event={"ID":"bddf5832-7ec1-4c44-a8ff-7c6eae681927","Type":"ContainerStarted","Data":"7460e93e95402f3decb7746254e4486f9bc0e9b7565a20329c26d99bd06c81c3"} Dec 11 22:03:25 crc kubenswrapper[4956]: I1211 22:03:25.369328 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-2" event={"ID":"9e73ef6f-6323-44c0-9ae8-b14eda333297","Type":"ContainerStarted","Data":"d250da4d1793623c6337b076048971cb9e591b7e0362e2e62acda09f92edc5fc"} Dec 11 22:03:25 crc kubenswrapper[4956]: I1211 22:03:25.565800 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-nrbcn"] Dec 11 22:03:26 crc kubenswrapper[4956]: I1211 22:03:26.378342 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rhjjc" event={"ID":"d1df7ab6-bddf-44db-9eec-a2a9f238a068","Type":"ContainerStarted","Data":"d0d44a175636dc75011de80333e53dd83e7fd30c6d75d538f2fe0cd7dd0857c6"} Dec 11 22:03:26 crc kubenswrapper[4956]: I1211 22:03:26.379563 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-nrbcn" event={"ID":"c8f134b9-199f-44f5-b6d4-2fd3b7766db7","Type":"ContainerStarted","Data":"d90893275fcddffc15f8beec65b01fedcde4c0e28e2e0ed277af18be4e9c5b56"} Dec 11 22:03:26 crc kubenswrapper[4956]: I1211 22:03:26.450579 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rhjjc" podStartSLOduration=2.600924183 podStartE2EDuration="13.450555436s" podCreationTimestamp="2025-12-11 22:03:13 +0000 UTC" firstStartedPulling="2025-12-11 22:03:15.271873884 +0000 UTC m=+887.716252034" lastFinishedPulling="2025-12-11 22:03:26.121505137 +0000 UTC m=+898.565883287" observedRunningTime="2025-12-11 22:03:26.447942435 +0000 UTC m=+898.892320575" watchObservedRunningTime="2025-12-11 22:03:26.450555436 +0000 UTC m=+898.894933596" Dec 11 22:03:29 crc kubenswrapper[4956]: I1211 22:03:29.406392 4956 generic.go:334] "Generic (PLEG): container finished" podID="94805819-605d-47fe-9670-957c387a50fb" 
containerID="61ff72f5dca2df719b4a547030afbc8ff85b2285ae2ff48e647ce8714e5bc97d" exitCode=0 Dec 11 22:03:29 crc kubenswrapper[4956]: I1211 22:03:29.406452 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-1" event={"ID":"94805819-605d-47fe-9670-957c387a50fb","Type":"ContainerDied","Data":"61ff72f5dca2df719b4a547030afbc8ff85b2285ae2ff48e647ce8714e5bc97d"} Dec 11 22:03:29 crc kubenswrapper[4956]: I1211 22:03:29.409173 4956 generic.go:334] "Generic (PLEG): container finished" podID="bddf5832-7ec1-4c44-a8ff-7c6eae681927" containerID="7460e93e95402f3decb7746254e4486f9bc0e9b7565a20329c26d99bd06c81c3" exitCode=0 Dec 11 22:03:29 crc kubenswrapper[4956]: I1211 22:03:29.409233 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-0" event={"ID":"bddf5832-7ec1-4c44-a8ff-7c6eae681927","Type":"ContainerDied","Data":"7460e93e95402f3decb7746254e4486f9bc0e9b7565a20329c26d99bd06c81c3"} Dec 11 22:03:29 crc kubenswrapper[4956]: I1211 22:03:29.411276 4956 generic.go:334] "Generic (PLEG): container finished" podID="9e73ef6f-6323-44c0-9ae8-b14eda333297" containerID="d250da4d1793623c6337b076048971cb9e591b7e0362e2e62acda09f92edc5fc" exitCode=0 Dec 11 22:03:29 crc kubenswrapper[4956]: I1211 22:03:29.411310 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-2" event={"ID":"9e73ef6f-6323-44c0-9ae8-b14eda333297","Type":"ContainerDied","Data":"d250da4d1793623c6337b076048971cb9e591b7e0362e2e62acda09f92edc5fc"} Dec 11 22:03:33 crc kubenswrapper[4956]: I1211 22:03:33.454567 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rhjjc" Dec 11 22:03:33 crc kubenswrapper[4956]: I1211 22:03:33.455212 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rhjjc" Dec 11 22:03:33 crc kubenswrapper[4956]: I1211 22:03:33.495194 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rhjjc" Dec 11 22:03:34 crc kubenswrapper[4956]: I1211 22:03:34.497574 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rhjjc" Dec 11 22:03:38 crc kubenswrapper[4956]: I1211 22:03:38.075231 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rhjjc"] Dec 11 22:03:38 crc kubenswrapper[4956]: I1211 22:03:38.075759 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rhjjc" podUID="d1df7ab6-bddf-44db-9eec-a2a9f238a068" containerName="registry-server" containerID="cri-o://d0d44a175636dc75011de80333e53dd83e7fd30c6d75d538f2fe0cd7dd0857c6" gracePeriod=2 Dec 11 22:03:42 crc kubenswrapper[4956]: I1211 22:03:42.488622 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-nrbcn" event={"ID":"c8f134b9-199f-44f5-b6d4-2fd3b7766db7","Type":"ContainerStarted","Data":"cefdb3e1acd0d4eb84cf38eb0f8f891d5155a5feb11c78207b6f29b8dc4e8fa7"} Dec 11 22:03:42 crc kubenswrapper[4956]: I1211 22:03:42.490878 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-0" event={"ID":"bddf5832-7ec1-4c44-a8ff-7c6eae681927","Type":"ContainerStarted","Data":"1d2bdfc30320f458287be375371fd596489a9ff300d82561ac4529d9cd6c783d"} Dec 11 22:03:42 crc kubenswrapper[4956]: I1211 22:03:42.492611 4956 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-2" event={"ID":"9e73ef6f-6323-44c0-9ae8-b14eda333297","Type":"ContainerStarted","Data":"bab5772297a194c63951d506e43ddec57e8e7d1d91543e824188e10a0a28ac6c"} Dec 11 22:03:42 crc kubenswrapper[4956]: I1211 22:03:42.495005 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/memcached-0" event={"ID":"ebf3438f-f633-4343-afbb-fab7515a880f","Type":"ContainerStarted","Data":"a74828e3506a05ee06ef54376c494ce822e1339af31e76192482cf974b126f40"} Dec 11 22:03:42 crc kubenswrapper[4956]: I1211 22:03:42.495151 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/memcached-0" Dec 11 22:03:42 crc kubenswrapper[4956]: I1211 22:03:42.496381 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-rhjjc_d1df7ab6-bddf-44db-9eec-a2a9f238a068/registry-server/0.log" Dec 11 22:03:42 crc kubenswrapper[4956]: I1211 22:03:42.497070 4956 generic.go:334] "Generic (PLEG): container finished" podID="d1df7ab6-bddf-44db-9eec-a2a9f238a068" containerID="d0d44a175636dc75011de80333e53dd83e7fd30c6d75d538f2fe0cd7dd0857c6" exitCode=137 Dec 11 22:03:42 crc kubenswrapper[4956]: I1211 22:03:42.497130 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rhjjc" event={"ID":"d1df7ab6-bddf-44db-9eec-a2a9f238a068","Type":"ContainerDied","Data":"d0d44a175636dc75011de80333e53dd83e7fd30c6d75d538f2fe0cd7dd0857c6"} Dec 11 22:03:42 crc kubenswrapper[4956]: I1211 22:03:42.498754 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-1" event={"ID":"94805819-605d-47fe-9670-957c387a50fb","Type":"ContainerStarted","Data":"29f944dfcf353e4d181579f513357a455d87e7c26121cd38c363606c79e84036"} Dec 11 22:03:43 crc kubenswrapper[4956]: I1211 22:03:43.250477 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-rhjjc_d1df7ab6-bddf-44db-9eec-a2a9f238a068/registry-server/0.log" Dec 11 22:03:43 crc kubenswrapper[4956]: I1211 22:03:43.251624 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rhjjc" Dec 11 22:03:43 crc kubenswrapper[4956]: I1211 22:03:43.271144 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/memcached-0" podStartSLOduration=14.041752035 podStartE2EDuration="23.2710418s" podCreationTimestamp="2025-12-11 22:03:20 +0000 UTC" firstStartedPulling="2025-12-11 22:03:24.654569122 +0000 UTC m=+897.098947272" lastFinishedPulling="2025-12-11 22:03:33.883858887 +0000 UTC m=+906.328237037" observedRunningTime="2025-12-11 22:03:42.513423374 +0000 UTC m=+914.957801524" watchObservedRunningTime="2025-12-11 22:03:43.2710418 +0000 UTC m=+915.715419950" Dec 11 22:03:43 crc kubenswrapper[4956]: I1211 22:03:43.463421 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dr9hq\" (UniqueName: \"kubernetes.io/projected/d1df7ab6-bddf-44db-9eec-a2a9f238a068-kube-api-access-dr9hq\") pod \"d1df7ab6-bddf-44db-9eec-a2a9f238a068\" (UID: \"d1df7ab6-bddf-44db-9eec-a2a9f238a068\") " Dec 11 22:03:43 crc kubenswrapper[4956]: I1211 22:03:43.463470 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1df7ab6-bddf-44db-9eec-a2a9f238a068-catalog-content\") pod \"d1df7ab6-bddf-44db-9eec-a2a9f238a068\" (UID: \"d1df7ab6-bddf-44db-9eec-a2a9f238a068\") " Dec 11 22:03:43 crc kubenswrapper[4956]: I1211 22:03:43.463502 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1df7ab6-bddf-44db-9eec-a2a9f238a068-utilities\") pod \"d1df7ab6-bddf-44db-9eec-a2a9f238a068\" (UID: \"d1df7ab6-bddf-44db-9eec-a2a9f238a068\") " Dec 11 22:03:43 crc kubenswrapper[4956]: I1211 22:03:43.464579 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1df7ab6-bddf-44db-9eec-a2a9f238a068-utilities" (OuterVolumeSpecName: "utilities") pod "d1df7ab6-bddf-44db-9eec-a2a9f238a068" (UID: "d1df7ab6-bddf-44db-9eec-a2a9f238a068"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:03:43 crc kubenswrapper[4956]: I1211 22:03:43.469275 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1df7ab6-bddf-44db-9eec-a2a9f238a068-kube-api-access-dr9hq" (OuterVolumeSpecName: "kube-api-access-dr9hq") pod "d1df7ab6-bddf-44db-9eec-a2a9f238a068" (UID: "d1df7ab6-bddf-44db-9eec-a2a9f238a068"). InnerVolumeSpecName "kube-api-access-dr9hq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:03:43 crc kubenswrapper[4956]: I1211 22:03:43.505017 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-rhjjc_d1df7ab6-bddf-44db-9eec-a2a9f238a068/registry-server/0.log" Dec 11 22:03:43 crc kubenswrapper[4956]: I1211 22:03:43.505902 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rhjjc" event={"ID":"d1df7ab6-bddf-44db-9eec-a2a9f238a068","Type":"ContainerDied","Data":"fe391fc3c7d820e84ce0e2cef4f95fee51a8c9c89a3cd70c2e215503fc3f854c"} Dec 11 22:03:43 crc kubenswrapper[4956]: I1211 22:03:43.505981 4956 scope.go:117] "RemoveContainer" containerID="d0d44a175636dc75011de80333e53dd83e7fd30c6d75d538f2fe0cd7dd0857c6" Dec 11 22:03:43 crc kubenswrapper[4956]: I1211 22:03:43.505937 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rhjjc" Dec 11 22:03:43 crc kubenswrapper[4956]: I1211 22:03:43.520618 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1df7ab6-bddf-44db-9eec-a2a9f238a068-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d1df7ab6-bddf-44db-9eec-a2a9f238a068" (UID: "d1df7ab6-bddf-44db-9eec-a2a9f238a068"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:03:43 crc kubenswrapper[4956]: I1211 22:03:43.534517 4956 scope.go:117] "RemoveContainer" containerID="820194886d05732d6658bbebb450aab370c42c874153ff6b59db87e96836b712" Dec 11 22:03:43 crc kubenswrapper[4956]: I1211 22:03:43.535189 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/openstack-galera-2" podStartSLOduration=22.515059673 podStartE2EDuration="36.53516629s" podCreationTimestamp="2025-12-11 22:03:07 +0000 UTC" firstStartedPulling="2025-12-11 22:03:10.197881233 +0000 UTC m=+882.642259383" lastFinishedPulling="2025-12-11 22:03:24.21798785 +0000 UTC m=+896.662366000" observedRunningTime="2025-12-11 22:03:43.521372848 +0000 UTC m=+915.965750988" watchObservedRunningTime="2025-12-11 22:03:43.53516629 +0000 UTC m=+915.979544450" Dec 11 22:03:43 crc kubenswrapper[4956]: I1211 22:03:43.541076 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-index-nrbcn" podStartSLOduration=8.495583804 podStartE2EDuration="19.541060069s" podCreationTimestamp="2025-12-11 22:03:24 +0000 UTC" firstStartedPulling="2025-12-11 22:03:25.585660433 +0000 UTC m=+898.030038583" lastFinishedPulling="2025-12-11 22:03:36.631136698 +0000 UTC m=+909.075514848" observedRunningTime="2025-12-11 22:03:43.537856903 +0000 UTC m=+915.982235063" watchObservedRunningTime="2025-12-11 22:03:43.541060069 +0000 UTC m=+915.985438219" Dec 11 22:03:43 crc kubenswrapper[4956]: I1211 22:03:43.566828 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/openstack-galera-1" podStartSLOduration=22.399940143 podStartE2EDuration="36.566774283s" podCreationTimestamp="2025-12-11 22:03:07 +0000 UTC" firstStartedPulling="2025-12-11 22:03:09.943893512 +0000 UTC m=+882.388271662" lastFinishedPulling="2025-12-11 22:03:24.110727652 +0000 UTC m=+896.555105802" observedRunningTime="2025-12-11 22:03:43.56409426 +0000 UTC m=+916.008472430" watchObservedRunningTime="2025-12-11 22:03:43.566774283 +0000 UTC m=+916.011152433" Dec 11 22:03:43 crc kubenswrapper[4956]: I1211 22:03:43.574479 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dr9hq\" (UniqueName: \"kubernetes.io/projected/d1df7ab6-bddf-44db-9eec-a2a9f238a068-kube-api-access-dr9hq\") on node \"crc\" DevicePath \"\"" Dec 11 22:03:43 crc kubenswrapper[4956]: I1211 22:03:43.574501 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1df7ab6-bddf-44db-9eec-a2a9f238a068-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 22:03:43 crc kubenswrapper[4956]: I1211 22:03:43.574523 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1df7ab6-bddf-44db-9eec-a2a9f238a068-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 22:03:43 crc kubenswrapper[4956]: I1211 22:03:43.574626 4956 scope.go:117] "RemoveContainer" 
containerID="3b5833eb328c5610a6393c184dcf9daca8f4f266b37b2b74c1eaba76ea8492b4" Dec 11 22:03:43 crc kubenswrapper[4956]: I1211 22:03:43.596118 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/openstack-galera-0" podStartSLOduration=22.341255246 podStartE2EDuration="36.596085963s" podCreationTimestamp="2025-12-11 22:03:07 +0000 UTC" firstStartedPulling="2025-12-11 22:03:09.909599335 +0000 UTC m=+882.353977485" lastFinishedPulling="2025-12-11 22:03:24.164430052 +0000 UTC m=+896.608808202" observedRunningTime="2025-12-11 22:03:43.595321452 +0000 UTC m=+916.039699612" watchObservedRunningTime="2025-12-11 22:03:43.596085963 +0000 UTC m=+916.040464123" Dec 11 22:03:43 crc kubenswrapper[4956]: I1211 22:03:43.837948 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rhjjc"] Dec 11 22:03:43 crc kubenswrapper[4956]: I1211 22:03:43.840592 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rhjjc"] Dec 11 22:03:44 crc kubenswrapper[4956]: I1211 22:03:44.027940 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1df7ab6-bddf-44db-9eec-a2a9f238a068" path="/var/lib/kubelet/pods/d1df7ab6-bddf-44db-9eec-a2a9f238a068/volumes" Dec 11 22:03:44 crc kubenswrapper[4956]: I1211 22:03:44.816282 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/rabbitmq-cluster-operator-index-nrbcn" Dec 11 22:03:44 crc kubenswrapper[4956]: I1211 22:03:44.817628 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/rabbitmq-cluster-operator-index-nrbcn" Dec 11 22:03:44 crc kubenswrapper[4956]: I1211 22:03:44.866394 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/rabbitmq-cluster-operator-index-nrbcn" Dec 11 22:03:49 crc kubenswrapper[4956]: I1211 22:03:49.141420 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/openstack-galera-0" Dec 11 22:03:49 crc kubenswrapper[4956]: I1211 22:03:49.142050 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="swift-kuttl-tests/openstack-galera-0" Dec 11 22:03:49 crc kubenswrapper[4956]: I1211 22:03:49.186259 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="swift-kuttl-tests/openstack-galera-1" Dec 11 22:03:49 crc kubenswrapper[4956]: I1211 22:03:49.186729 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="swift-kuttl-tests/openstack-galera-2" Dec 11 22:03:49 crc kubenswrapper[4956]: I1211 22:03:49.186798 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/openstack-galera-1" Dec 11 22:03:49 crc kubenswrapper[4956]: I1211 22:03:49.186821 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/openstack-galera-2" Dec 11 22:03:50 crc kubenswrapper[4956]: I1211 22:03:50.926940 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/memcached-0" Dec 11 22:03:51 crc kubenswrapper[4956]: I1211 22:03:51.470481 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="swift-kuttl-tests/openstack-galera-2" Dec 11 22:03:51 crc kubenswrapper[4956]: I1211 22:03:51.549055 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/openstack-galera-2" Dec 11 22:03:54 crc kubenswrapper[4956]: I1211 
22:03:54.844166 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/rabbitmq-cluster-operator-index-nrbcn" Dec 11 22:03:57 crc kubenswrapper[4956]: I1211 22:03:57.844935 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/root-account-create-update-c4fh7"] Dec 11 22:03:57 crc kubenswrapper[4956]: E1211 22:03:57.845548 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1df7ab6-bddf-44db-9eec-a2a9f238a068" containerName="extract-content" Dec 11 22:03:57 crc kubenswrapper[4956]: I1211 22:03:57.845562 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1df7ab6-bddf-44db-9eec-a2a9f238a068" containerName="extract-content" Dec 11 22:03:57 crc kubenswrapper[4956]: E1211 22:03:57.845580 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1df7ab6-bddf-44db-9eec-a2a9f238a068" containerName="extract-utilities" Dec 11 22:03:57 crc kubenswrapper[4956]: I1211 22:03:57.845587 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1df7ab6-bddf-44db-9eec-a2a9f238a068" containerName="extract-utilities" Dec 11 22:03:57 crc kubenswrapper[4956]: E1211 22:03:57.845596 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1df7ab6-bddf-44db-9eec-a2a9f238a068" containerName="registry-server" Dec 11 22:03:57 crc kubenswrapper[4956]: I1211 22:03:57.845603 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1df7ab6-bddf-44db-9eec-a2a9f238a068" containerName="registry-server" Dec 11 22:03:57 crc kubenswrapper[4956]: I1211 22:03:57.845743 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1df7ab6-bddf-44db-9eec-a2a9f238a068" containerName="registry-server" Dec 11 22:03:57 crc kubenswrapper[4956]: I1211 22:03:57.846307 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/root-account-create-update-c4fh7" Dec 11 22:03:57 crc kubenswrapper[4956]: I1211 22:03:57.848719 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"openstack-mariadb-root-db-secret" Dec 11 22:03:57 crc kubenswrapper[4956]: I1211 22:03:57.877713 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/root-account-create-update-c4fh7"] Dec 11 22:03:58 crc kubenswrapper[4956]: I1211 22:03:58.012618 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mw86\" (UniqueName: \"kubernetes.io/projected/5aee315b-55ac-4721-8ad1-ff6128dc0b15-kube-api-access-7mw86\") pod \"root-account-create-update-c4fh7\" (UID: \"5aee315b-55ac-4721-8ad1-ff6128dc0b15\") " pod="swift-kuttl-tests/root-account-create-update-c4fh7" Dec 11 22:03:58 crc kubenswrapper[4956]: I1211 22:03:58.012966 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5aee315b-55ac-4721-8ad1-ff6128dc0b15-operator-scripts\") pod \"root-account-create-update-c4fh7\" (UID: \"5aee315b-55ac-4721-8ad1-ff6128dc0b15\") " pod="swift-kuttl-tests/root-account-create-update-c4fh7" Dec 11 22:03:58 crc kubenswrapper[4956]: I1211 22:03:58.114616 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5aee315b-55ac-4721-8ad1-ff6128dc0b15-operator-scripts\") pod \"root-account-create-update-c4fh7\" (UID: \"5aee315b-55ac-4721-8ad1-ff6128dc0b15\") " pod="swift-kuttl-tests/root-account-create-update-c4fh7" Dec 11 22:03:58 crc kubenswrapper[4956]: I1211 22:03:58.114740 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mw86\" (UniqueName: \"kubernetes.io/projected/5aee315b-55ac-4721-8ad1-ff6128dc0b15-kube-api-access-7mw86\") pod \"root-account-create-update-c4fh7\" (UID: \"5aee315b-55ac-4721-8ad1-ff6128dc0b15\") " pod="swift-kuttl-tests/root-account-create-update-c4fh7" Dec 11 22:03:58 crc kubenswrapper[4956]: I1211 22:03:58.115634 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5aee315b-55ac-4721-8ad1-ff6128dc0b15-operator-scripts\") pod \"root-account-create-update-c4fh7\" (UID: \"5aee315b-55ac-4721-8ad1-ff6128dc0b15\") " pod="swift-kuttl-tests/root-account-create-update-c4fh7" Dec 11 22:03:58 crc kubenswrapper[4956]: I1211 22:03:58.137498 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mw86\" (UniqueName: \"kubernetes.io/projected/5aee315b-55ac-4721-8ad1-ff6128dc0b15-kube-api-access-7mw86\") pod \"root-account-create-update-c4fh7\" (UID: \"5aee315b-55ac-4721-8ad1-ff6128dc0b15\") " pod="swift-kuttl-tests/root-account-create-update-c4fh7" Dec 11 22:03:58 crc kubenswrapper[4956]: I1211 22:03:58.184137 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/root-account-create-update-c4fh7" Dec 11 22:03:58 crc kubenswrapper[4956]: I1211 22:03:58.812725 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/root-account-create-update-c4fh7"] Dec 11 22:03:58 crc kubenswrapper[4956]: W1211 22:03:58.819009 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5aee315b_55ac_4721_8ad1_ff6128dc0b15.slice/crio-a3374188c948d5083b2cf83930cd341e4e9bd4d8cfcbcea0481a56509cd72de2 WatchSource:0}: Error finding container a3374188c948d5083b2cf83930cd341e4e9bd4d8cfcbcea0481a56509cd72de2: Status 404 returned error can't find the container with id a3374188c948d5083b2cf83930cd341e4e9bd4d8cfcbcea0481a56509cd72de2 Dec 11 22:03:59 crc kubenswrapper[4956]: I1211 22:03:59.606478 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/root-account-create-update-c4fh7" event={"ID":"5aee315b-55ac-4721-8ad1-ff6128dc0b15","Type":"ContainerStarted","Data":"a561660c7cf172a9504610fea52d23fc6ba2c25dd99b1e8e60024938473e01d2"} Dec 11 22:03:59 crc kubenswrapper[4956]: I1211 22:03:59.607043 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/root-account-create-update-c4fh7" event={"ID":"5aee315b-55ac-4721-8ad1-ff6128dc0b15","Type":"ContainerStarted","Data":"a3374188c948d5083b2cf83930cd341e4e9bd4d8cfcbcea0481a56509cd72de2"} Dec 11 22:03:59 crc kubenswrapper[4956]: I1211 22:03:59.624712 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/root-account-create-update-c4fh7" podStartSLOduration=2.624694204 podStartE2EDuration="2.624694204s" podCreationTimestamp="2025-12-11 22:03:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 22:03:59.623175112 +0000 UTC m=+932.067553272" watchObservedRunningTime="2025-12-11 22:03:59.624694204 +0000 UTC m=+932.069072354" Dec 11 22:04:00 crc kubenswrapper[4956]: I1211 22:04:00.614066 4956 generic.go:334] "Generic (PLEG): container finished" podID="5aee315b-55ac-4721-8ad1-ff6128dc0b15" containerID="a561660c7cf172a9504610fea52d23fc6ba2c25dd99b1e8e60024938473e01d2" exitCode=0 Dec 11 22:04:00 crc kubenswrapper[4956]: I1211 22:04:00.614461 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/root-account-create-update-c4fh7" event={"ID":"5aee315b-55ac-4721-8ad1-ff6128dc0b15","Type":"ContainerDied","Data":"a561660c7cf172a9504610fea52d23fc6ba2c25dd99b1e8e60024938473e01d2"} Dec 11 22:04:01 crc kubenswrapper[4956]: I1211 22:04:01.308025 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="swift-kuttl-tests/openstack-galera-0" Dec 11 22:04:01 crc kubenswrapper[4956]: I1211 22:04:01.386870 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/openstack-galera-0" Dec 11 22:04:02 crc kubenswrapper[4956]: I1211 22:04:02.435385 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/root-account-create-update-c4fh7" Dec 11 22:04:02 crc kubenswrapper[4956]: I1211 22:04:02.578097 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7mw86\" (UniqueName: \"kubernetes.io/projected/5aee315b-55ac-4721-8ad1-ff6128dc0b15-kube-api-access-7mw86\") pod \"5aee315b-55ac-4721-8ad1-ff6128dc0b15\" (UID: \"5aee315b-55ac-4721-8ad1-ff6128dc0b15\") " Dec 11 22:04:02 crc kubenswrapper[4956]: I1211 22:04:02.578206 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5aee315b-55ac-4721-8ad1-ff6128dc0b15-operator-scripts\") pod \"5aee315b-55ac-4721-8ad1-ff6128dc0b15\" (UID: \"5aee315b-55ac-4721-8ad1-ff6128dc0b15\") " Dec 11 22:04:02 crc kubenswrapper[4956]: I1211 22:04:02.578672 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5aee315b-55ac-4721-8ad1-ff6128dc0b15-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5aee315b-55ac-4721-8ad1-ff6128dc0b15" (UID: "5aee315b-55ac-4721-8ad1-ff6128dc0b15"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 22:04:02 crc kubenswrapper[4956]: I1211 22:04:02.584180 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5aee315b-55ac-4721-8ad1-ff6128dc0b15-kube-api-access-7mw86" (OuterVolumeSpecName: "kube-api-access-7mw86") pod "5aee315b-55ac-4721-8ad1-ff6128dc0b15" (UID: "5aee315b-55ac-4721-8ad1-ff6128dc0b15"). InnerVolumeSpecName "kube-api-access-7mw86". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:04:02 crc kubenswrapper[4956]: I1211 22:04:02.680376 4956 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5aee315b-55ac-4721-8ad1-ff6128dc0b15-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 22:04:02 crc kubenswrapper[4956]: I1211 22:04:02.680419 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7mw86\" (UniqueName: \"kubernetes.io/projected/5aee315b-55ac-4721-8ad1-ff6128dc0b15-kube-api-access-7mw86\") on node \"crc\" DevicePath \"\"" Dec 11 22:04:02 crc kubenswrapper[4956]: I1211 22:04:02.779028 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/root-account-create-update-c4fh7" event={"ID":"5aee315b-55ac-4721-8ad1-ff6128dc0b15","Type":"ContainerDied","Data":"a3374188c948d5083b2cf83930cd341e4e9bd4d8cfcbcea0481a56509cd72de2"} Dec 11 22:04:02 crc kubenswrapper[4956]: I1211 22:04:02.779073 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a3374188c948d5083b2cf83930cd341e4e9bd4d8cfcbcea0481a56509cd72de2" Dec 11 22:04:02 crc kubenswrapper[4956]: I1211 22:04:02.779083 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/root-account-create-update-c4fh7" Dec 11 22:04:06 crc kubenswrapper[4956]: I1211 22:04:06.119026 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="swift-kuttl-tests/openstack-galera-1" Dec 11 22:04:06 crc kubenswrapper[4956]: I1211 22:04:06.239668 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/openstack-galera-1" Dec 11 22:04:12 crc kubenswrapper[4956]: I1211 22:04:12.514257 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s"] Dec 11 22:04:12 crc kubenswrapper[4956]: E1211 22:04:12.515123 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5aee315b-55ac-4721-8ad1-ff6128dc0b15" containerName="mariadb-account-create-update" Dec 11 22:04:12 crc kubenswrapper[4956]: I1211 22:04:12.515138 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="5aee315b-55ac-4721-8ad1-ff6128dc0b15" containerName="mariadb-account-create-update" Dec 11 22:04:12 crc kubenswrapper[4956]: I1211 22:04:12.515290 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="5aee315b-55ac-4721-8ad1-ff6128dc0b15" containerName="mariadb-account-create-update" Dec 11 22:04:12 crc kubenswrapper[4956]: I1211 22:04:12.516245 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s" Dec 11 22:04:12 crc kubenswrapper[4956]: I1211 22:04:12.518088 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-7p6h2" Dec 11 22:04:12 crc kubenswrapper[4956]: I1211 22:04:12.527064 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s"] Dec 11 22:04:12 crc kubenswrapper[4956]: I1211 22:04:12.665846 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3f33b928-05d6-489c-a9d0-1c23f69a7849-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s\" (UID: \"3f33b928-05d6-489c-a9d0-1c23f69a7849\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s" Dec 11 22:04:12 crc kubenswrapper[4956]: I1211 22:04:12.665928 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9mxt\" (UniqueName: \"kubernetes.io/projected/3f33b928-05d6-489c-a9d0-1c23f69a7849-kube-api-access-f9mxt\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s\" (UID: \"3f33b928-05d6-489c-a9d0-1c23f69a7849\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s" Dec 11 22:04:12 crc kubenswrapper[4956]: I1211 22:04:12.666049 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3f33b928-05d6-489c-a9d0-1c23f69a7849-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s\" (UID: \"3f33b928-05d6-489c-a9d0-1c23f69a7849\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s" Dec 11 22:04:12 crc kubenswrapper[4956]: I1211 22:04:12.767993 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9mxt\" (UniqueName: 
\"kubernetes.io/projected/3f33b928-05d6-489c-a9d0-1c23f69a7849-kube-api-access-f9mxt\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s\" (UID: \"3f33b928-05d6-489c-a9d0-1c23f69a7849\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s" Dec 11 22:04:12 crc kubenswrapper[4956]: I1211 22:04:12.768226 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3f33b928-05d6-489c-a9d0-1c23f69a7849-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s\" (UID: \"3f33b928-05d6-489c-a9d0-1c23f69a7849\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s" Dec 11 22:04:12 crc kubenswrapper[4956]: I1211 22:04:12.768329 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3f33b928-05d6-489c-a9d0-1c23f69a7849-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s\" (UID: \"3f33b928-05d6-489c-a9d0-1c23f69a7849\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s" Dec 11 22:04:12 crc kubenswrapper[4956]: I1211 22:04:12.769046 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3f33b928-05d6-489c-a9d0-1c23f69a7849-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s\" (UID: \"3f33b928-05d6-489c-a9d0-1c23f69a7849\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s" Dec 11 22:04:12 crc kubenswrapper[4956]: I1211 22:04:12.769110 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3f33b928-05d6-489c-a9d0-1c23f69a7849-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s\" (UID: \"3f33b928-05d6-489c-a9d0-1c23f69a7849\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s" Dec 11 22:04:12 crc kubenswrapper[4956]: I1211 22:04:12.794151 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9mxt\" (UniqueName: \"kubernetes.io/projected/3f33b928-05d6-489c-a9d0-1c23f69a7849-kube-api-access-f9mxt\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s\" (UID: \"3f33b928-05d6-489c-a9d0-1c23f69a7849\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s" Dec 11 22:04:12 crc kubenswrapper[4956]: I1211 22:04:12.834478 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s" Dec 11 22:04:13 crc kubenswrapper[4956]: I1211 22:04:13.349203 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s"] Dec 11 22:04:13 crc kubenswrapper[4956]: I1211 22:04:13.853383 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s" event={"ID":"3f33b928-05d6-489c-a9d0-1c23f69a7849","Type":"ContainerStarted","Data":"e32b1fb361a18f1a39f482d064e623d03c3f9f96815602cd517b5069584baf16"} Dec 11 22:04:13 crc kubenswrapper[4956]: I1211 22:04:13.853433 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s" event={"ID":"3f33b928-05d6-489c-a9d0-1c23f69a7849","Type":"ContainerStarted","Data":"4bb1a51c122045ac5ff24412ed8094610fcc85ac3477fd9cb4154db1214b2cd1"} Dec 11 22:04:14 crc kubenswrapper[4956]: I1211 22:04:14.865263 4956 generic.go:334] "Generic (PLEG): container finished" podID="3f33b928-05d6-489c-a9d0-1c23f69a7849" containerID="e32b1fb361a18f1a39f482d064e623d03c3f9f96815602cd517b5069584baf16" exitCode=0 Dec 11 22:04:14 crc kubenswrapper[4956]: I1211 22:04:14.865450 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s" event={"ID":"3f33b928-05d6-489c-a9d0-1c23f69a7849","Type":"ContainerDied","Data":"e32b1fb361a18f1a39f482d064e623d03c3f9f96815602cd517b5069584baf16"} Dec 11 22:04:18 crc kubenswrapper[4956]: I1211 22:04:18.897032 4956 generic.go:334] "Generic (PLEG): container finished" podID="3f33b928-05d6-489c-a9d0-1c23f69a7849" containerID="af64c9f31512ec72334b888a8ab532c5ce9f8f48cbd681a7f986b7bc32cb5b36" exitCode=0 Dec 11 22:04:18 crc kubenswrapper[4956]: I1211 22:04:18.897228 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s" event={"ID":"3f33b928-05d6-489c-a9d0-1c23f69a7849","Type":"ContainerDied","Data":"af64c9f31512ec72334b888a8ab532c5ce9f8f48cbd681a7f986b7bc32cb5b36"} Dec 11 22:04:19 crc kubenswrapper[4956]: I1211 22:04:19.905585 4956 generic.go:334] "Generic (PLEG): container finished" podID="3f33b928-05d6-489c-a9d0-1c23f69a7849" containerID="94b3c1b167852bf674168b9f3d0f38b0c3ece50b0c4ae720551948d280855f7c" exitCode=0 Dec 11 22:04:19 crc kubenswrapper[4956]: I1211 22:04:19.905688 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s" event={"ID":"3f33b928-05d6-489c-a9d0-1c23f69a7849","Type":"ContainerDied","Data":"94b3c1b167852bf674168b9f3d0f38b0c3ece50b0c4ae720551948d280855f7c"} Dec 11 22:04:21 crc kubenswrapper[4956]: I1211 22:04:21.462845 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s" Dec 11 22:04:21 crc kubenswrapper[4956]: I1211 22:04:21.535262 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3f33b928-05d6-489c-a9d0-1c23f69a7849-util\") pod \"3f33b928-05d6-489c-a9d0-1c23f69a7849\" (UID: \"3f33b928-05d6-489c-a9d0-1c23f69a7849\") " Dec 11 22:04:21 crc kubenswrapper[4956]: I1211 22:04:21.535384 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3f33b928-05d6-489c-a9d0-1c23f69a7849-bundle\") pod \"3f33b928-05d6-489c-a9d0-1c23f69a7849\" (UID: \"3f33b928-05d6-489c-a9d0-1c23f69a7849\") " Dec 11 22:04:21 crc kubenswrapper[4956]: I1211 22:04:21.535419 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f9mxt\" (UniqueName: \"kubernetes.io/projected/3f33b928-05d6-489c-a9d0-1c23f69a7849-kube-api-access-f9mxt\") pod \"3f33b928-05d6-489c-a9d0-1c23f69a7849\" (UID: \"3f33b928-05d6-489c-a9d0-1c23f69a7849\") " Dec 11 22:04:21 crc kubenswrapper[4956]: I1211 22:04:21.537374 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f33b928-05d6-489c-a9d0-1c23f69a7849-bundle" (OuterVolumeSpecName: "bundle") pod "3f33b928-05d6-489c-a9d0-1c23f69a7849" (UID: "3f33b928-05d6-489c-a9d0-1c23f69a7849"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:04:21 crc kubenswrapper[4956]: I1211 22:04:21.544384 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f33b928-05d6-489c-a9d0-1c23f69a7849-kube-api-access-f9mxt" (OuterVolumeSpecName: "kube-api-access-f9mxt") pod "3f33b928-05d6-489c-a9d0-1c23f69a7849" (UID: "3f33b928-05d6-489c-a9d0-1c23f69a7849"). InnerVolumeSpecName "kube-api-access-f9mxt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:04:21 crc kubenswrapper[4956]: I1211 22:04:21.545213 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f33b928-05d6-489c-a9d0-1c23f69a7849-util" (OuterVolumeSpecName: "util") pod "3f33b928-05d6-489c-a9d0-1c23f69a7849" (UID: "3f33b928-05d6-489c-a9d0-1c23f69a7849"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:04:21 crc kubenswrapper[4956]: I1211 22:04:21.637859 4956 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3f33b928-05d6-489c-a9d0-1c23f69a7849-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 22:04:21 crc kubenswrapper[4956]: I1211 22:04:21.637921 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f9mxt\" (UniqueName: \"kubernetes.io/projected/3f33b928-05d6-489c-a9d0-1c23f69a7849-kube-api-access-f9mxt\") on node \"crc\" DevicePath \"\"" Dec 11 22:04:21 crc kubenswrapper[4956]: I1211 22:04:21.637935 4956 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3f33b928-05d6-489c-a9d0-1c23f69a7849-util\") on node \"crc\" DevicePath \"\"" Dec 11 22:04:21 crc kubenswrapper[4956]: I1211 22:04:21.923312 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s" event={"ID":"3f33b928-05d6-489c-a9d0-1c23f69a7849","Type":"ContainerDied","Data":"4bb1a51c122045ac5ff24412ed8094610fcc85ac3477fd9cb4154db1214b2cd1"} Dec 11 22:04:21 crc kubenswrapper[4956]: I1211 22:04:21.923354 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4bb1a51c122045ac5ff24412ed8094610fcc85ac3477fd9cb4154db1214b2cd1" Dec 11 22:04:21 crc kubenswrapper[4956]: I1211 22:04:21.923425 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s" Dec 11 22:04:27 crc kubenswrapper[4956]: I1211 22:04:27.932244 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-ssktt"] Dec 11 22:04:27 crc kubenswrapper[4956]: E1211 22:04:27.933312 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f33b928-05d6-489c-a9d0-1c23f69a7849" containerName="pull" Dec 11 22:04:27 crc kubenswrapper[4956]: I1211 22:04:27.933329 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f33b928-05d6-489c-a9d0-1c23f69a7849" containerName="pull" Dec 11 22:04:27 crc kubenswrapper[4956]: E1211 22:04:27.933346 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f33b928-05d6-489c-a9d0-1c23f69a7849" containerName="util" Dec 11 22:04:27 crc kubenswrapper[4956]: I1211 22:04:27.933353 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f33b928-05d6-489c-a9d0-1c23f69a7849" containerName="util" Dec 11 22:04:27 crc kubenswrapper[4956]: E1211 22:04:27.933376 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f33b928-05d6-489c-a9d0-1c23f69a7849" containerName="extract" Dec 11 22:04:27 crc kubenswrapper[4956]: I1211 22:04:27.933384 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f33b928-05d6-489c-a9d0-1c23f69a7849" containerName="extract" Dec 11 22:04:27 crc kubenswrapper[4956]: I1211 22:04:27.933521 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f33b928-05d6-489c-a9d0-1c23f69a7849" containerName="extract" Dec 11 22:04:27 crc kubenswrapper[4956]: I1211 22:04:27.934014 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-ssktt" Dec 11 22:04:27 crc kubenswrapper[4956]: I1211 22:04:27.935757 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-dockercfg-t979z" Dec 11 22:04:27 crc kubenswrapper[4956]: I1211 22:04:27.946182 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-ssktt"] Dec 11 22:04:28 crc kubenswrapper[4956]: I1211 22:04:28.070419 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5plf\" (UniqueName: \"kubernetes.io/projected/33dc9fa8-0749-48a8-a1dd-ac9b438fc6c1-kube-api-access-b5plf\") pod \"rabbitmq-cluster-operator-779fc9694b-ssktt\" (UID: \"33dc9fa8-0749-48a8-a1dd-ac9b438fc6c1\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-ssktt" Dec 11 22:04:28 crc kubenswrapper[4956]: I1211 22:04:28.172178 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5plf\" (UniqueName: \"kubernetes.io/projected/33dc9fa8-0749-48a8-a1dd-ac9b438fc6c1-kube-api-access-b5plf\") pod \"rabbitmq-cluster-operator-779fc9694b-ssktt\" (UID: \"33dc9fa8-0749-48a8-a1dd-ac9b438fc6c1\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-ssktt" Dec 11 22:04:28 crc kubenswrapper[4956]: I1211 22:04:28.195646 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5plf\" (UniqueName: \"kubernetes.io/projected/33dc9fa8-0749-48a8-a1dd-ac9b438fc6c1-kube-api-access-b5plf\") pod \"rabbitmq-cluster-operator-779fc9694b-ssktt\" (UID: \"33dc9fa8-0749-48a8-a1dd-ac9b438fc6c1\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-ssktt" Dec 11 22:04:28 crc kubenswrapper[4956]: I1211 22:04:28.254530 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-dockercfg-t979z" Dec 11 22:04:28 crc kubenswrapper[4956]: I1211 22:04:28.263313 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-ssktt" Dec 11 22:04:28 crc kubenswrapper[4956]: I1211 22:04:28.513792 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-ssktt"] Dec 11 22:04:28 crc kubenswrapper[4956]: I1211 22:04:28.989809 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-ssktt" event={"ID":"33dc9fa8-0749-48a8-a1dd-ac9b438fc6c1","Type":"ContainerStarted","Data":"bb8d7e3fbf394d99cede0c6db72aaa66a26599f50a4dddf9ca6f70929c9edfcc"} Dec 11 22:04:34 crc kubenswrapper[4956]: I1211 22:04:34.081699 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-ssktt" event={"ID":"33dc9fa8-0749-48a8-a1dd-ac9b438fc6c1","Type":"ContainerStarted","Data":"728d6a03e074486ecddcb9433a2d6ecff27f3a17a3cb8e12ecfe5685cf9547b6"} Dec 11 22:04:34 crc kubenswrapper[4956]: I1211 22:04:34.100989 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-ssktt" podStartSLOduration=2.618713768 podStartE2EDuration="7.10096189s" podCreationTimestamp="2025-12-11 22:04:27 +0000 UTC" firstStartedPulling="2025-12-11 22:04:28.526258796 +0000 UTC m=+960.970636946" lastFinishedPulling="2025-12-11 22:04:33.008506918 +0000 UTC m=+965.452885068" observedRunningTime="2025-12-11 22:04:34.09614711 +0000 UTC m=+966.540525260" watchObservedRunningTime="2025-12-11 22:04:34.10096189 +0000 UTC m=+966.545340050" Dec 11 22:04:35 crc kubenswrapper[4956]: I1211 22:04:35.987956 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/rabbitmq-server-0"] Dec 11 22:04:35 crc kubenswrapper[4956]: I1211 22:04:35.990306 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:35 crc kubenswrapper[4956]: I1211 22:04:35.992789 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"rabbitmq-server-conf" Dec 11 22:04:35 crc kubenswrapper[4956]: I1211 22:04:35.992789 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"rabbitmq-default-user" Dec 11 22:04:35 crc kubenswrapper[4956]: I1211 22:04:35.993246 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"rabbitmq-erlang-cookie" Dec 11 22:04:35 crc kubenswrapper[4956]: I1211 22:04:35.993443 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"rabbitmq-server-dockercfg-54t99" Dec 11 22:04:35 crc kubenswrapper[4956]: I1211 22:04:35.994283 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"rabbitmq-plugins-conf" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.001609 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/rabbitmq-server-0"] Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.148452 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e6c107b9-3006-441b-ad65-9293163bc60b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e6c107b9-3006-441b-ad65-9293163bc60b\") pod \"rabbitmq-server-0\" (UID: \"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.148506 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.148547 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.148576 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.148621 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.148650 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 
22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.148683 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zz89t\" (UniqueName: \"kubernetes.io/projected/b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7-kube-api-access-zz89t\") pod \"rabbitmq-server-0\" (UID: \"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.148708 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.250452 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.250516 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.250556 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zz89t\" (UniqueName: \"kubernetes.io/projected/b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7-kube-api-access-zz89t\") pod \"rabbitmq-server-0\" (UID: \"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.250583 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.250637 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e6c107b9-3006-441b-ad65-9293163bc60b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e6c107b9-3006-441b-ad65-9293163bc60b\") pod \"rabbitmq-server-0\" (UID: \"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.250668 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.250706 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.250732 4956 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.251355 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.251365 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.253425 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.256541 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.257375 4956 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.257412 4956 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e6c107b9-3006-441b-ad65-9293163bc60b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e6c107b9-3006-441b-ad65-9293163bc60b\") pod \"rabbitmq-server-0\" (UID: \"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e214d8ee9149bafaf413653cc94db53ea1f1aef8ecf3f1d5b75a81787abc637b/globalmount\"" pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.258790 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.267505 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.290485 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zz89t\" (UniqueName: \"kubernetes.io/projected/b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7-kube-api-access-zz89t\") pod \"rabbitmq-server-0\" (UID: \"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.306476 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e6c107b9-3006-441b-ad65-9293163bc60b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e6c107b9-3006-441b-ad65-9293163bc60b\") pod \"rabbitmq-server-0\" (UID: \"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.605390 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:04:36 crc kubenswrapper[4956]: I1211 22:04:36.957981 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/rabbitmq-server-0"] Dec 11 22:04:36 crc kubenswrapper[4956]: W1211 22:04:36.961052 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb3a33e0c_0ca4_4df5_9a83_e24f0659c9a7.slice/crio-7a2e0c6b8f36ddcac64190bb906bcd9eca199cb50f20964953536cb2f21ab84d WatchSource:0}: Error finding container 7a2e0c6b8f36ddcac64190bb906bcd9eca199cb50f20964953536cb2f21ab84d: Status 404 returned error can't find the container with id 7a2e0c6b8f36ddcac64190bb906bcd9eca199cb50f20964953536cb2f21ab84d Dec 11 22:04:37 crc kubenswrapper[4956]: I1211 22:04:37.098982 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/rabbitmq-server-0" event={"ID":"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7","Type":"ContainerStarted","Data":"7a2e0c6b8f36ddcac64190bb906bcd9eca199cb50f20964953536cb2f21ab84d"} Dec 11 22:04:37 crc kubenswrapper[4956]: I1211 22:04:37.671994 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-index-sn9vb"] Dec 11 22:04:37 crc kubenswrapper[4956]: I1211 22:04:37.672888 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-sn9vb" Dec 11 22:04:37 crc kubenswrapper[4956]: I1211 22:04:37.674515 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-index-dockercfg-zm6dx" Dec 11 22:04:37 crc kubenswrapper[4956]: I1211 22:04:37.680826 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-sn9vb"] Dec 11 22:04:37 crc kubenswrapper[4956]: I1211 22:04:37.872403 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9km8\" (UniqueName: \"kubernetes.io/projected/0193dd7c-70b9-4978-b06a-878e08014d5c-kube-api-access-q9km8\") pod \"keystone-operator-index-sn9vb\" (UID: \"0193dd7c-70b9-4978-b06a-878e08014d5c\") " pod="openstack-operators/keystone-operator-index-sn9vb" Dec 11 22:04:37 crc kubenswrapper[4956]: I1211 22:04:37.974809 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9km8\" (UniqueName: \"kubernetes.io/projected/0193dd7c-70b9-4978-b06a-878e08014d5c-kube-api-access-q9km8\") pod \"keystone-operator-index-sn9vb\" (UID: \"0193dd7c-70b9-4978-b06a-878e08014d5c\") " pod="openstack-operators/keystone-operator-index-sn9vb" Dec 11 22:04:37 crc kubenswrapper[4956]: I1211 22:04:37.996003 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9km8\" (UniqueName: \"kubernetes.io/projected/0193dd7c-70b9-4978-b06a-878e08014d5c-kube-api-access-q9km8\") pod \"keystone-operator-index-sn9vb\" (UID: \"0193dd7c-70b9-4978-b06a-878e08014d5c\") " pod="openstack-operators/keystone-operator-index-sn9vb" Dec 11 22:04:38 crc kubenswrapper[4956]: I1211 22:04:38.007394 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-sn9vb" Dec 11 22:04:38 crc kubenswrapper[4956]: I1211 22:04:38.456080 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-sn9vb"] Dec 11 22:04:39 crc kubenswrapper[4956]: I1211 22:04:39.110824 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-sn9vb" event={"ID":"0193dd7c-70b9-4978-b06a-878e08014d5c","Type":"ContainerStarted","Data":"eff52de5188b80430ab996a56ad69468365c3b881e60ed9b0a5a110061f01914"} Dec 11 22:04:46 crc kubenswrapper[4956]: I1211 22:04:46.200184 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-sn9vb" event={"ID":"0193dd7c-70b9-4978-b06a-878e08014d5c","Type":"ContainerStarted","Data":"2635fb8d305542ff40d63acd3b7223c417a64d11dc4592c6e06fd5f5813c2805"} Dec 11 22:04:46 crc kubenswrapper[4956]: I1211 22:04:46.223207 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-index-sn9vb" podStartSLOduration=2.161885183 podStartE2EDuration="9.223191324s" podCreationTimestamp="2025-12-11 22:04:37 +0000 UTC" firstStartedPulling="2025-12-11 22:04:38.461995243 +0000 UTC m=+970.906373393" lastFinishedPulling="2025-12-11 22:04:45.523301384 +0000 UTC m=+977.967679534" observedRunningTime="2025-12-11 22:04:46.219706809 +0000 UTC m=+978.664084979" watchObservedRunningTime="2025-12-11 22:04:46.223191324 +0000 UTC m=+978.667569474" Dec 11 22:04:47 crc kubenswrapper[4956]: I1211 22:04:47.210562 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/rabbitmq-server-0" event={"ID":"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7","Type":"ContainerStarted","Data":"1fad63cc376a6f16633f68c384ac50b4672d3f94c420b43114c5870a69a7c6ff"} Dec 11 22:04:48 crc kubenswrapper[4956]: I1211 22:04:48.007983 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-index-sn9vb" Dec 11 22:04:48 crc kubenswrapper[4956]: I1211 22:04:48.008067 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/keystone-operator-index-sn9vb" Dec 11 22:04:48 crc kubenswrapper[4956]: I1211 22:04:48.039889 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/keystone-operator-index-sn9vb" Dec 11 22:04:58 crc kubenswrapper[4956]: I1211 22:04:58.033824 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-index-sn9vb" Dec 11 22:05:08 crc kubenswrapper[4956]: I1211 22:05:08.708567 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm"] Dec 11 22:05:08 crc kubenswrapper[4956]: I1211 22:05:08.710846 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm" Dec 11 22:05:08 crc kubenswrapper[4956]: I1211 22:05:08.713349 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-7p6h2" Dec 11 22:05:08 crc kubenswrapper[4956]: I1211 22:05:08.719259 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm"] Dec 11 22:05:08 crc kubenswrapper[4956]: I1211 22:05:08.799466 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d1b83c20-2930-4926-9a24-84a05bfe56a9-bundle\") pod \"3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm\" (UID: \"d1b83c20-2930-4926-9a24-84a05bfe56a9\") " pod="openstack-operators/3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm" Dec 11 22:05:08 crc kubenswrapper[4956]: I1211 22:05:08.800080 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czkwx\" (UniqueName: \"kubernetes.io/projected/d1b83c20-2930-4926-9a24-84a05bfe56a9-kube-api-access-czkwx\") pod \"3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm\" (UID: \"d1b83c20-2930-4926-9a24-84a05bfe56a9\") " pod="openstack-operators/3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm" Dec 11 22:05:08 crc kubenswrapper[4956]: I1211 22:05:08.800288 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d1b83c20-2930-4926-9a24-84a05bfe56a9-util\") pod \"3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm\" (UID: \"d1b83c20-2930-4926-9a24-84a05bfe56a9\") " pod="openstack-operators/3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm" Dec 11 22:05:08 crc kubenswrapper[4956]: I1211 22:05:08.901093 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d1b83c20-2930-4926-9a24-84a05bfe56a9-bundle\") pod \"3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm\" (UID: \"d1b83c20-2930-4926-9a24-84a05bfe56a9\") " pod="openstack-operators/3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm" Dec 11 22:05:08 crc kubenswrapper[4956]: I1211 22:05:08.901155 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czkwx\" (UniqueName: \"kubernetes.io/projected/d1b83c20-2930-4926-9a24-84a05bfe56a9-kube-api-access-czkwx\") pod \"3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm\" (UID: \"d1b83c20-2930-4926-9a24-84a05bfe56a9\") " pod="openstack-operators/3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm" Dec 11 22:05:08 crc kubenswrapper[4956]: I1211 22:05:08.901237 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d1b83c20-2930-4926-9a24-84a05bfe56a9-util\") pod \"3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm\" (UID: \"d1b83c20-2930-4926-9a24-84a05bfe56a9\") " pod="openstack-operators/3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm" Dec 11 22:05:08 crc kubenswrapper[4956]: I1211 22:05:08.901808 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/d1b83c20-2930-4926-9a24-84a05bfe56a9-util\") pod \"3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm\" (UID: \"d1b83c20-2930-4926-9a24-84a05bfe56a9\") " pod="openstack-operators/3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm" Dec 11 22:05:08 crc kubenswrapper[4956]: I1211 22:05:08.901842 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d1b83c20-2930-4926-9a24-84a05bfe56a9-bundle\") pod \"3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm\" (UID: \"d1b83c20-2930-4926-9a24-84a05bfe56a9\") " pod="openstack-operators/3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm" Dec 11 22:05:08 crc kubenswrapper[4956]: I1211 22:05:08.923882 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czkwx\" (UniqueName: \"kubernetes.io/projected/d1b83c20-2930-4926-9a24-84a05bfe56a9-kube-api-access-czkwx\") pod \"3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm\" (UID: \"d1b83c20-2930-4926-9a24-84a05bfe56a9\") " pod="openstack-operators/3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm" Dec 11 22:05:09 crc kubenswrapper[4956]: I1211 22:05:09.044728 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm" Dec 11 22:05:09 crc kubenswrapper[4956]: I1211 22:05:09.443280 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm"] Dec 11 22:05:10 crc kubenswrapper[4956]: I1211 22:05:10.401260 4956 generic.go:334] "Generic (PLEG): container finished" podID="d1b83c20-2930-4926-9a24-84a05bfe56a9" containerID="b1b5795be70095bbc8db87eab08b61b90791700b486b413462043c518335159a" exitCode=0 Dec 11 22:05:10 crc kubenswrapper[4956]: I1211 22:05:10.401321 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm" event={"ID":"d1b83c20-2930-4926-9a24-84a05bfe56a9","Type":"ContainerDied","Data":"b1b5795be70095bbc8db87eab08b61b90791700b486b413462043c518335159a"} Dec 11 22:05:10 crc kubenswrapper[4956]: I1211 22:05:10.401544 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm" event={"ID":"d1b83c20-2930-4926-9a24-84a05bfe56a9","Type":"ContainerStarted","Data":"7edc9267cb70f151aad892c3665827f941a37bd4ad9228a452139db5a7150de5"} Dec 11 22:05:13 crc kubenswrapper[4956]: I1211 22:05:13.421399 4956 generic.go:334] "Generic (PLEG): container finished" podID="d1b83c20-2930-4926-9a24-84a05bfe56a9" containerID="774c502e6c62eead29318cc73c9ba432232fc54cdee52ee6f17ed39e87c1ea33" exitCode=0 Dec 11 22:05:13 crc kubenswrapper[4956]: I1211 22:05:13.421517 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm" event={"ID":"d1b83c20-2930-4926-9a24-84a05bfe56a9","Type":"ContainerDied","Data":"774c502e6c62eead29318cc73c9ba432232fc54cdee52ee6f17ed39e87c1ea33"} Dec 11 22:05:14 crc kubenswrapper[4956]: I1211 22:05:14.430482 4956 generic.go:334] "Generic (PLEG): container finished" podID="d1b83c20-2930-4926-9a24-84a05bfe56a9" containerID="3f8120234f65c242b6e26993651fd3273ea164ae26c849391e9ec28026d693a1" exitCode=0 Dec 11 22:05:14 crc kubenswrapper[4956]: I1211 22:05:14.430580 4956 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm" event={"ID":"d1b83c20-2930-4926-9a24-84a05bfe56a9","Type":"ContainerDied","Data":"3f8120234f65c242b6e26993651fd3273ea164ae26c849391e9ec28026d693a1"} Dec 11 22:05:15 crc kubenswrapper[4956]: I1211 22:05:15.750873 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm" Dec 11 22:05:15 crc kubenswrapper[4956]: I1211 22:05:15.904517 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d1b83c20-2930-4926-9a24-84a05bfe56a9-util\") pod \"d1b83c20-2930-4926-9a24-84a05bfe56a9\" (UID: \"d1b83c20-2930-4926-9a24-84a05bfe56a9\") " Dec 11 22:05:15 crc kubenswrapper[4956]: I1211 22:05:15.904740 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-czkwx\" (UniqueName: \"kubernetes.io/projected/d1b83c20-2930-4926-9a24-84a05bfe56a9-kube-api-access-czkwx\") pod \"d1b83c20-2930-4926-9a24-84a05bfe56a9\" (UID: \"d1b83c20-2930-4926-9a24-84a05bfe56a9\") " Dec 11 22:05:15 crc kubenswrapper[4956]: I1211 22:05:15.904835 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d1b83c20-2930-4926-9a24-84a05bfe56a9-bundle\") pod \"d1b83c20-2930-4926-9a24-84a05bfe56a9\" (UID: \"d1b83c20-2930-4926-9a24-84a05bfe56a9\") " Dec 11 22:05:15 crc kubenswrapper[4956]: I1211 22:05:15.905835 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1b83c20-2930-4926-9a24-84a05bfe56a9-bundle" (OuterVolumeSpecName: "bundle") pod "d1b83c20-2930-4926-9a24-84a05bfe56a9" (UID: "d1b83c20-2930-4926-9a24-84a05bfe56a9"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:05:15 crc kubenswrapper[4956]: I1211 22:05:15.912087 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1b83c20-2930-4926-9a24-84a05bfe56a9-kube-api-access-czkwx" (OuterVolumeSpecName: "kube-api-access-czkwx") pod "d1b83c20-2930-4926-9a24-84a05bfe56a9" (UID: "d1b83c20-2930-4926-9a24-84a05bfe56a9"). InnerVolumeSpecName "kube-api-access-czkwx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:05:15 crc kubenswrapper[4956]: I1211 22:05:15.915387 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1b83c20-2930-4926-9a24-84a05bfe56a9-util" (OuterVolumeSpecName: "util") pod "d1b83c20-2930-4926-9a24-84a05bfe56a9" (UID: "d1b83c20-2930-4926-9a24-84a05bfe56a9"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:05:16 crc kubenswrapper[4956]: I1211 22:05:16.007016 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-czkwx\" (UniqueName: \"kubernetes.io/projected/d1b83c20-2930-4926-9a24-84a05bfe56a9-kube-api-access-czkwx\") on node \"crc\" DevicePath \"\"" Dec 11 22:05:16 crc kubenswrapper[4956]: I1211 22:05:16.007056 4956 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d1b83c20-2930-4926-9a24-84a05bfe56a9-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 22:05:16 crc kubenswrapper[4956]: I1211 22:05:16.007067 4956 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d1b83c20-2930-4926-9a24-84a05bfe56a9-util\") on node \"crc\" DevicePath \"\"" Dec 11 22:05:16 crc kubenswrapper[4956]: I1211 22:05:16.449531 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm" event={"ID":"d1b83c20-2930-4926-9a24-84a05bfe56a9","Type":"ContainerDied","Data":"7edc9267cb70f151aad892c3665827f941a37bd4ad9228a452139db5a7150de5"} Dec 11 22:05:16 crc kubenswrapper[4956]: I1211 22:05:16.449589 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7edc9267cb70f151aad892c3665827f941a37bd4ad9228a452139db5a7150de5" Dec 11 22:05:16 crc kubenswrapper[4956]: I1211 22:05:16.449711 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm" Dec 11 22:05:16 crc kubenswrapper[4956]: I1211 22:05:16.888622 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 22:05:16 crc kubenswrapper[4956]: I1211 22:05:16.890048 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 22:05:19 crc kubenswrapper[4956]: I1211 22:05:19.480196 4956 generic.go:334] "Generic (PLEG): container finished" podID="b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7" containerID="1fad63cc376a6f16633f68c384ac50b4672d3f94c420b43114c5870a69a7c6ff" exitCode=0 Dec 11 22:05:19 crc kubenswrapper[4956]: I1211 22:05:19.480314 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/rabbitmq-server-0" event={"ID":"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7","Type":"ContainerDied","Data":"1fad63cc376a6f16633f68c384ac50b4672d3f94c420b43114c5870a69a7c6ff"} Dec 11 22:05:20 crc kubenswrapper[4956]: I1211 22:05:20.489505 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/rabbitmq-server-0" event={"ID":"b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7","Type":"ContainerStarted","Data":"006b7c034e4c8a5e75a0ee3e62bafaf518a228095933b8b98ddf0507152e7be9"} Dec 11 22:05:20 crc kubenswrapper[4956]: I1211 22:05:20.490064 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:05:20 crc kubenswrapper[4956]: I1211 22:05:20.513594 4956 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="swift-kuttl-tests/rabbitmq-server-0" podStartSLOduration=37.957188472 podStartE2EDuration="46.51357256s" podCreationTimestamp="2025-12-11 22:04:34 +0000 UTC" firstStartedPulling="2025-12-11 22:04:36.96371904 +0000 UTC m=+969.408097190" lastFinishedPulling="2025-12-11 22:04:45.520103108 +0000 UTC m=+977.964481278" observedRunningTime="2025-12-11 22:05:20.508175996 +0000 UTC m=+1012.952554156" watchObservedRunningTime="2025-12-11 22:05:20.51357256 +0000 UTC m=+1012.957950710" Dec 11 22:05:24 crc kubenswrapper[4956]: I1211 22:05:24.523113 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-794987f786-cml4t"] Dec 11 22:05:24 crc kubenswrapper[4956]: E1211 22:05:24.523998 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1b83c20-2930-4926-9a24-84a05bfe56a9" containerName="extract" Dec 11 22:05:24 crc kubenswrapper[4956]: I1211 22:05:24.524015 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1b83c20-2930-4926-9a24-84a05bfe56a9" containerName="extract" Dec 11 22:05:24 crc kubenswrapper[4956]: E1211 22:05:24.524031 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1b83c20-2930-4926-9a24-84a05bfe56a9" containerName="util" Dec 11 22:05:24 crc kubenswrapper[4956]: I1211 22:05:24.524038 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1b83c20-2930-4926-9a24-84a05bfe56a9" containerName="util" Dec 11 22:05:24 crc kubenswrapper[4956]: E1211 22:05:24.524059 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1b83c20-2930-4926-9a24-84a05bfe56a9" containerName="pull" Dec 11 22:05:24 crc kubenswrapper[4956]: I1211 22:05:24.524068 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1b83c20-2930-4926-9a24-84a05bfe56a9" containerName="pull" Dec 11 22:05:24 crc kubenswrapper[4956]: I1211 22:05:24.524197 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1b83c20-2930-4926-9a24-84a05bfe56a9" containerName="extract" Dec 11 22:05:24 crc kubenswrapper[4956]: I1211 22:05:24.524720 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-794987f786-cml4t" Dec 11 22:05:24 crc kubenswrapper[4956]: I1211 22:05:24.526747 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-szr4c" Dec 11 22:05:24 crc kubenswrapper[4956]: I1211 22:05:24.532365 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-service-cert" Dec 11 22:05:24 crc kubenswrapper[4956]: I1211 22:05:24.538142 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-794987f786-cml4t"] Dec 11 22:05:24 crc kubenswrapper[4956]: I1211 22:05:24.656319 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/015cecc7-360c-4399-83c5-f13692a5e145-webhook-cert\") pod \"keystone-operator-controller-manager-794987f786-cml4t\" (UID: \"015cecc7-360c-4399-83c5-f13692a5e145\") " pod="openstack-operators/keystone-operator-controller-manager-794987f786-cml4t" Dec 11 22:05:24 crc kubenswrapper[4956]: I1211 22:05:24.656366 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lh9hw\" (UniqueName: \"kubernetes.io/projected/015cecc7-360c-4399-83c5-f13692a5e145-kube-api-access-lh9hw\") pod \"keystone-operator-controller-manager-794987f786-cml4t\" (UID: \"015cecc7-360c-4399-83c5-f13692a5e145\") " pod="openstack-operators/keystone-operator-controller-manager-794987f786-cml4t" Dec 11 22:05:24 crc kubenswrapper[4956]: I1211 22:05:24.656414 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/015cecc7-360c-4399-83c5-f13692a5e145-apiservice-cert\") pod \"keystone-operator-controller-manager-794987f786-cml4t\" (UID: \"015cecc7-360c-4399-83c5-f13692a5e145\") " pod="openstack-operators/keystone-operator-controller-manager-794987f786-cml4t" Dec 11 22:05:24 crc kubenswrapper[4956]: I1211 22:05:24.762505 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/015cecc7-360c-4399-83c5-f13692a5e145-webhook-cert\") pod \"keystone-operator-controller-manager-794987f786-cml4t\" (UID: \"015cecc7-360c-4399-83c5-f13692a5e145\") " pod="openstack-operators/keystone-operator-controller-manager-794987f786-cml4t" Dec 11 22:05:24 crc kubenswrapper[4956]: I1211 22:05:24.762568 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lh9hw\" (UniqueName: \"kubernetes.io/projected/015cecc7-360c-4399-83c5-f13692a5e145-kube-api-access-lh9hw\") pod \"keystone-operator-controller-manager-794987f786-cml4t\" (UID: \"015cecc7-360c-4399-83c5-f13692a5e145\") " pod="openstack-operators/keystone-operator-controller-manager-794987f786-cml4t" Dec 11 22:05:24 crc kubenswrapper[4956]: I1211 22:05:24.762624 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/015cecc7-360c-4399-83c5-f13692a5e145-apiservice-cert\") pod \"keystone-operator-controller-manager-794987f786-cml4t\" (UID: \"015cecc7-360c-4399-83c5-f13692a5e145\") " pod="openstack-operators/keystone-operator-controller-manager-794987f786-cml4t" Dec 11 22:05:24 crc kubenswrapper[4956]: I1211 22:05:24.768738 4956 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/015cecc7-360c-4399-83c5-f13692a5e145-apiservice-cert\") pod \"keystone-operator-controller-manager-794987f786-cml4t\" (UID: \"015cecc7-360c-4399-83c5-f13692a5e145\") " pod="openstack-operators/keystone-operator-controller-manager-794987f786-cml4t" Dec 11 22:05:24 crc kubenswrapper[4956]: I1211 22:05:24.771916 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/015cecc7-360c-4399-83c5-f13692a5e145-webhook-cert\") pod \"keystone-operator-controller-manager-794987f786-cml4t\" (UID: \"015cecc7-360c-4399-83c5-f13692a5e145\") " pod="openstack-operators/keystone-operator-controller-manager-794987f786-cml4t" Dec 11 22:05:24 crc kubenswrapper[4956]: I1211 22:05:24.782328 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lh9hw\" (UniqueName: \"kubernetes.io/projected/015cecc7-360c-4399-83c5-f13692a5e145-kube-api-access-lh9hw\") pod \"keystone-operator-controller-manager-794987f786-cml4t\" (UID: \"015cecc7-360c-4399-83c5-f13692a5e145\") " pod="openstack-operators/keystone-operator-controller-manager-794987f786-cml4t" Dec 11 22:05:24 crc kubenswrapper[4956]: I1211 22:05:24.845314 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-794987f786-cml4t" Dec 11 22:05:25 crc kubenswrapper[4956]: I1211 22:05:25.347528 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-794987f786-cml4t"] Dec 11 22:05:25 crc kubenswrapper[4956]: I1211 22:05:25.519933 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-794987f786-cml4t" event={"ID":"015cecc7-360c-4399-83c5-f13692a5e145","Type":"ContainerStarted","Data":"d8b1d967a8ceeb83646dd652d46924d4e0758708143858102b907a0cc2a31892"} Dec 11 22:05:29 crc kubenswrapper[4956]: I1211 22:05:29.599777 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-794987f786-cml4t" event={"ID":"015cecc7-360c-4399-83c5-f13692a5e145","Type":"ContainerStarted","Data":"26638bdf174a76cd19058cbfec1a8306733e6f23a8825fedbf0b7c415cd7cb97"} Dec 11 22:05:29 crc kubenswrapper[4956]: I1211 22:05:29.600209 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-794987f786-cml4t" Dec 11 22:05:29 crc kubenswrapper[4956]: I1211 22:05:29.619284 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-794987f786-cml4t" podStartSLOduration=1.674831967 podStartE2EDuration="5.619263709s" podCreationTimestamp="2025-12-11 22:05:24 +0000 UTC" firstStartedPulling="2025-12-11 22:05:25.367921833 +0000 UTC m=+1017.812299983" lastFinishedPulling="2025-12-11 22:05:29.312353575 +0000 UTC m=+1021.756731725" observedRunningTime="2025-12-11 22:05:29.618049626 +0000 UTC m=+1022.062427786" watchObservedRunningTime="2025-12-11 22:05:29.619263709 +0000 UTC m=+1022.063641859" Dec 11 22:05:34 crc kubenswrapper[4956]: I1211 22:05:34.850760 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-794987f786-cml4t" Dec 11 22:05:36 crc kubenswrapper[4956]: I1211 22:05:36.608807 4956 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/rabbitmq-server-0" Dec 11 22:05:40 crc kubenswrapper[4956]: I1211 22:05:40.412430 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/keystone-db-create-dz86p"] Dec 11 22:05:40 crc kubenswrapper[4956]: I1211 22:05:40.413560 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-db-create-dz86p" Dec 11 22:05:40 crc kubenswrapper[4956]: I1211 22:05:40.417609 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/keystone-6e56-account-create-update-7csrp"] Dec 11 22:05:40 crc kubenswrapper[4956]: I1211 22:05:40.418330 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-6e56-account-create-update-7csrp" Dec 11 22:05:40 crc kubenswrapper[4956]: I1211 22:05:40.421440 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-db-secret" Dec 11 22:05:40 crc kubenswrapper[4956]: I1211 22:05:40.425726 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-db-create-dz86p"] Dec 11 22:05:40 crc kubenswrapper[4956]: I1211 22:05:40.431843 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-6e56-account-create-update-7csrp"] Dec 11 22:05:40 crc kubenswrapper[4956]: I1211 22:05:40.552555 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7swb\" (UniqueName: \"kubernetes.io/projected/b3573b9e-0775-4bee-bb5f-df5a7c4f62fe-kube-api-access-r7swb\") pod \"keystone-6e56-account-create-update-7csrp\" (UID: \"b3573b9e-0775-4bee-bb5f-df5a7c4f62fe\") " pod="swift-kuttl-tests/keystone-6e56-account-create-update-7csrp" Dec 11 22:05:40 crc kubenswrapper[4956]: I1211 22:05:40.553270 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8g4jq\" (UniqueName: \"kubernetes.io/projected/2fbb0d97-cbdf-4c82-bf2e-69f422c76813-kube-api-access-8g4jq\") pod \"keystone-db-create-dz86p\" (UID: \"2fbb0d97-cbdf-4c82-bf2e-69f422c76813\") " pod="swift-kuttl-tests/keystone-db-create-dz86p" Dec 11 22:05:40 crc kubenswrapper[4956]: I1211 22:05:40.553334 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b3573b9e-0775-4bee-bb5f-df5a7c4f62fe-operator-scripts\") pod \"keystone-6e56-account-create-update-7csrp\" (UID: \"b3573b9e-0775-4bee-bb5f-df5a7c4f62fe\") " pod="swift-kuttl-tests/keystone-6e56-account-create-update-7csrp" Dec 11 22:05:40 crc kubenswrapper[4956]: I1211 22:05:40.553382 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2fbb0d97-cbdf-4c82-bf2e-69f422c76813-operator-scripts\") pod \"keystone-db-create-dz86p\" (UID: \"2fbb0d97-cbdf-4c82-bf2e-69f422c76813\") " pod="swift-kuttl-tests/keystone-db-create-dz86p" Dec 11 22:05:40 crc kubenswrapper[4956]: I1211 22:05:40.655096 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7swb\" (UniqueName: \"kubernetes.io/projected/b3573b9e-0775-4bee-bb5f-df5a7c4f62fe-kube-api-access-r7swb\") pod \"keystone-6e56-account-create-update-7csrp\" (UID: \"b3573b9e-0775-4bee-bb5f-df5a7c4f62fe\") " pod="swift-kuttl-tests/keystone-6e56-account-create-update-7csrp" Dec 11 22:05:40 crc 
kubenswrapper[4956]: I1211 22:05:40.655205 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8g4jq\" (UniqueName: \"kubernetes.io/projected/2fbb0d97-cbdf-4c82-bf2e-69f422c76813-kube-api-access-8g4jq\") pod \"keystone-db-create-dz86p\" (UID: \"2fbb0d97-cbdf-4c82-bf2e-69f422c76813\") " pod="swift-kuttl-tests/keystone-db-create-dz86p" Dec 11 22:05:40 crc kubenswrapper[4956]: I1211 22:05:40.655237 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b3573b9e-0775-4bee-bb5f-df5a7c4f62fe-operator-scripts\") pod \"keystone-6e56-account-create-update-7csrp\" (UID: \"b3573b9e-0775-4bee-bb5f-df5a7c4f62fe\") " pod="swift-kuttl-tests/keystone-6e56-account-create-update-7csrp" Dec 11 22:05:40 crc kubenswrapper[4956]: I1211 22:05:40.655259 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2fbb0d97-cbdf-4c82-bf2e-69f422c76813-operator-scripts\") pod \"keystone-db-create-dz86p\" (UID: \"2fbb0d97-cbdf-4c82-bf2e-69f422c76813\") " pod="swift-kuttl-tests/keystone-db-create-dz86p" Dec 11 22:05:40 crc kubenswrapper[4956]: I1211 22:05:40.656091 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2fbb0d97-cbdf-4c82-bf2e-69f422c76813-operator-scripts\") pod \"keystone-db-create-dz86p\" (UID: \"2fbb0d97-cbdf-4c82-bf2e-69f422c76813\") " pod="swift-kuttl-tests/keystone-db-create-dz86p" Dec 11 22:05:40 crc kubenswrapper[4956]: I1211 22:05:40.656328 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b3573b9e-0775-4bee-bb5f-df5a7c4f62fe-operator-scripts\") pod \"keystone-6e56-account-create-update-7csrp\" (UID: \"b3573b9e-0775-4bee-bb5f-df5a7c4f62fe\") " pod="swift-kuttl-tests/keystone-6e56-account-create-update-7csrp" Dec 11 22:05:40 crc kubenswrapper[4956]: I1211 22:05:40.680678 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8g4jq\" (UniqueName: \"kubernetes.io/projected/2fbb0d97-cbdf-4c82-bf2e-69f422c76813-kube-api-access-8g4jq\") pod \"keystone-db-create-dz86p\" (UID: \"2fbb0d97-cbdf-4c82-bf2e-69f422c76813\") " pod="swift-kuttl-tests/keystone-db-create-dz86p" Dec 11 22:05:40 crc kubenswrapper[4956]: I1211 22:05:40.682039 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7swb\" (UniqueName: \"kubernetes.io/projected/b3573b9e-0775-4bee-bb5f-df5a7c4f62fe-kube-api-access-r7swb\") pod \"keystone-6e56-account-create-update-7csrp\" (UID: \"b3573b9e-0775-4bee-bb5f-df5a7c4f62fe\") " pod="swift-kuttl-tests/keystone-6e56-account-create-update-7csrp" Dec 11 22:05:40 crc kubenswrapper[4956]: I1211 22:05:40.746668 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-db-create-dz86p" Dec 11 22:05:40 crc kubenswrapper[4956]: I1211 22:05:40.756444 4956 util.go:30] "No sandbox for pod can be found. 
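
[editor's note] The mount sequence above follows the volume manager's standard three phases: operationExecutor.VerifyControllerAttachedVolume confirms the volume is attached, operationExecutor.MountVolume starts the mount, and MountVolume.SetUp reports success, after which the pod can start. A simplified sketch of that reconcile pattern (the phase type and reconcile function are hypothetical stand-ins, not the kubelet's real volume-manager types):

package main

import "fmt"

type phase int

const (
    attached phase = iota // VerifyControllerAttachedVolume confirmed the attach
    mounting              // operationExecutor.MountVolume started
    mounted               // MountVolume.SetUp succeeded
)

// reconcile advances each desired volume one phase per pass, converging the
// actual state toward the desired state the way the kubelet's reconciler does.
func reconcile(desired []string, actual map[string]phase) {
    for _, vol := range desired {
        switch actual[vol] {
        case attached:
            fmt.Printf("MountVolume started for volume %q\n", vol)
            actual[vol] = mounting
        case mounting:
            fmt.Printf("MountVolume.SetUp succeeded for volume %q\n", vol)
            actual[vol] = mounted
        }
    }
}

func main() {
    desired := []string{"operator-scripts", "kube-api-access-8g4jq"}
    actual := map[string]phase{"operator-scripts": attached, "kube-api-access-8g4jq": attached}
    reconcile(desired, actual) // first pass: mounts start
    reconcile(desired, actual) // second pass: SetUp succeeds
}
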
Need to start a new one" pod="swift-kuttl-tests/keystone-6e56-account-create-update-7csrp" Dec 11 22:05:41 crc kubenswrapper[4956]: I1211 22:05:41.014399 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-6e56-account-create-update-7csrp"] Dec 11 22:05:41 crc kubenswrapper[4956]: W1211 22:05:41.018840 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb3573b9e_0775_4bee_bb5f_df5a7c4f62fe.slice/crio-8a1e2d654f90d0f4d27ae32f2b374edbe271c79b0435ea3a88aa190aea45589d WatchSource:0}: Error finding container 8a1e2d654f90d0f4d27ae32f2b374edbe271c79b0435ea3a88aa190aea45589d: Status 404 returned error can't find the container with id 8a1e2d654f90d0f4d27ae32f2b374edbe271c79b0435ea3a88aa190aea45589d Dec 11 22:05:41 crc kubenswrapper[4956]: I1211 22:05:41.163460 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-db-create-dz86p"] Dec 11 22:05:41 crc kubenswrapper[4956]: W1211 22:05:41.173992 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2fbb0d97_cbdf_4c82_bf2e_69f422c76813.slice/crio-b725c47f047b58d6a0f00883bdd83c4139370c1eb1d2648e321fbfd28a0718ff WatchSource:0}: Error finding container b725c47f047b58d6a0f00883bdd83c4139370c1eb1d2648e321fbfd28a0718ff: Status 404 returned error can't find the container with id b725c47f047b58d6a0f00883bdd83c4139370c1eb1d2648e321fbfd28a0718ff Dec 11 22:05:41 crc kubenswrapper[4956]: I1211 22:05:41.806423 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-6e56-account-create-update-7csrp" event={"ID":"b3573b9e-0775-4bee-bb5f-df5a7c4f62fe","Type":"ContainerStarted","Data":"8a1e2d654f90d0f4d27ae32f2b374edbe271c79b0435ea3a88aa190aea45589d"} Dec 11 22:05:41 crc kubenswrapper[4956]: I1211 22:05:41.808918 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-db-create-dz86p" event={"ID":"2fbb0d97-cbdf-4c82-bf2e-69f422c76813","Type":"ContainerStarted","Data":"b725c47f047b58d6a0f00883bdd83c4139370c1eb1d2648e321fbfd28a0718ff"} Dec 11 22:05:42 crc kubenswrapper[4956]: I1211 22:05:42.816256 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-6e56-account-create-update-7csrp" event={"ID":"b3573b9e-0775-4bee-bb5f-df5a7c4f62fe","Type":"ContainerStarted","Data":"2454fcb60eec66f2260b873c63ff19c69059df1781620b1d8527f9119ed57b68"} Dec 11 22:05:43 crc kubenswrapper[4956]: I1211 22:05:43.273724 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/keystone-6e56-account-create-update-7csrp" podStartSLOduration=3.273690761 podStartE2EDuration="3.273690761s" podCreationTimestamp="2025-12-11 22:05:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 22:05:42.832452865 +0000 UTC m=+1035.276831015" watchObservedRunningTime="2025-12-11 22:05:43.273690761 +0000 UTC m=+1035.718068951" Dec 11 22:05:43 crc kubenswrapper[4956]: I1211 22:05:43.280766 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-index-x9bqj"] Dec 11 22:05:43 crc kubenswrapper[4956]: I1211 22:05:43.282398 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-index-x9bqj" Dec 11 22:05:43 crc kubenswrapper[4956]: I1211 22:05:43.291035 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-index-dockercfg-hjsjt" Dec 11 22:05:43 crc kubenswrapper[4956]: I1211 22:05:43.299068 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-index-x9bqj"] Dec 11 22:05:43 crc kubenswrapper[4956]: I1211 22:05:43.301087 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzd89\" (UniqueName: \"kubernetes.io/projected/6c5d5efc-26a4-46e9-981b-1e9a6f134fda-kube-api-access-wzd89\") pod \"barbican-operator-index-x9bqj\" (UID: \"6c5d5efc-26a4-46e9-981b-1e9a6f134fda\") " pod="openstack-operators/barbican-operator-index-x9bqj" Dec 11 22:05:43 crc kubenswrapper[4956]: I1211 22:05:43.401949 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzd89\" (UniqueName: \"kubernetes.io/projected/6c5d5efc-26a4-46e9-981b-1e9a6f134fda-kube-api-access-wzd89\") pod \"barbican-operator-index-x9bqj\" (UID: \"6c5d5efc-26a4-46e9-981b-1e9a6f134fda\") " pod="openstack-operators/barbican-operator-index-x9bqj" Dec 11 22:05:43 crc kubenswrapper[4956]: I1211 22:05:43.423105 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzd89\" (UniqueName: \"kubernetes.io/projected/6c5d5efc-26a4-46e9-981b-1e9a6f134fda-kube-api-access-wzd89\") pod \"barbican-operator-index-x9bqj\" (UID: \"6c5d5efc-26a4-46e9-981b-1e9a6f134fda\") " pod="openstack-operators/barbican-operator-index-x9bqj" Dec 11 22:05:43 crc kubenswrapper[4956]: I1211 22:05:43.606839 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-index-x9bqj" Dec 11 22:05:43 crc kubenswrapper[4956]: I1211 22:05:43.823553 4956 generic.go:334] "Generic (PLEG): container finished" podID="b3573b9e-0775-4bee-bb5f-df5a7c4f62fe" containerID="2454fcb60eec66f2260b873c63ff19c69059df1781620b1d8527f9119ed57b68" exitCode=0 Dec 11 22:05:43 crc kubenswrapper[4956]: I1211 22:05:43.823640 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-6e56-account-create-update-7csrp" event={"ID":"b3573b9e-0775-4bee-bb5f-df5a7c4f62fe","Type":"ContainerDied","Data":"2454fcb60eec66f2260b873c63ff19c69059df1781620b1d8527f9119ed57b68"} Dec 11 22:05:43 crc kubenswrapper[4956]: I1211 22:05:43.825540 4956 generic.go:334] "Generic (PLEG): container finished" podID="2fbb0d97-cbdf-4c82-bf2e-69f422c76813" containerID="e836a5eb58fc684c1049070aab0a3ea3ecc48bef21df4157d396696ab7904452" exitCode=0 Dec 11 22:05:43 crc kubenswrapper[4956]: I1211 22:05:43.825581 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-db-create-dz86p" event={"ID":"2fbb0d97-cbdf-4c82-bf2e-69f422c76813","Type":"ContainerDied","Data":"e836a5eb58fc684c1049070aab0a3ea3ecc48bef21df4157d396696ab7904452"} Dec 11 22:05:44 crc kubenswrapper[4956]: I1211 22:05:44.410829 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-index-x9bqj"] Dec 11 22:05:44 crc kubenswrapper[4956]: W1211 22:05:44.421054 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6c5d5efc_26a4_46e9_981b_1e9a6f134fda.slice/crio-766dd1f2211d612429ed27ed356b100056aa65ddee9909d1f5a869d6e54f037a WatchSource:0}: Error finding container 766dd1f2211d612429ed27ed356b100056aa65ddee9909d1f5a869d6e54f037a: Status 404 returned error can't find the container with id 766dd1f2211d612429ed27ed356b100056aa65ddee9909d1f5a869d6e54f037a Dec 11 22:05:44 crc kubenswrapper[4956]: I1211 22:05:44.422510 4956 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 22:05:44 crc kubenswrapper[4956]: I1211 22:05:44.834949 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-index-x9bqj" event={"ID":"6c5d5efc-26a4-46e9-981b-1e9a6f134fda","Type":"ContainerStarted","Data":"766dd1f2211d612429ed27ed356b100056aa65ddee9909d1f5a869d6e54f037a"} Dec 11 22:05:45 crc kubenswrapper[4956]: I1211 22:05:45.166550 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-6e56-account-create-update-7csrp" Dec 11 22:05:45 crc kubenswrapper[4956]: I1211 22:05:45.244103 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-db-create-dz86p" Dec 11 22:05:45 crc kubenswrapper[4956]: I1211 22:05:45.266644 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r7swb\" (UniqueName: \"kubernetes.io/projected/b3573b9e-0775-4bee-bb5f-df5a7c4f62fe-kube-api-access-r7swb\") pod \"b3573b9e-0775-4bee-bb5f-df5a7c4f62fe\" (UID: \"b3573b9e-0775-4bee-bb5f-df5a7c4f62fe\") " Dec 11 22:05:45 crc kubenswrapper[4956]: I1211 22:05:45.266720 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b3573b9e-0775-4bee-bb5f-df5a7c4f62fe-operator-scripts\") pod \"b3573b9e-0775-4bee-bb5f-df5a7c4f62fe\" (UID: \"b3573b9e-0775-4bee-bb5f-df5a7c4f62fe\") " Dec 11 22:05:45 crc kubenswrapper[4956]: I1211 22:05:45.268288 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3573b9e-0775-4bee-bb5f-df5a7c4f62fe-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b3573b9e-0775-4bee-bb5f-df5a7c4f62fe" (UID: "b3573b9e-0775-4bee-bb5f-df5a7c4f62fe"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 22:05:45 crc kubenswrapper[4956]: I1211 22:05:45.273977 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3573b9e-0775-4bee-bb5f-df5a7c4f62fe-kube-api-access-r7swb" (OuterVolumeSpecName: "kube-api-access-r7swb") pod "b3573b9e-0775-4bee-bb5f-df5a7c4f62fe" (UID: "b3573b9e-0775-4bee-bb5f-df5a7c4f62fe"). InnerVolumeSpecName "kube-api-access-r7swb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:05:45 crc kubenswrapper[4956]: I1211 22:05:45.367560 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2fbb0d97-cbdf-4c82-bf2e-69f422c76813-operator-scripts\") pod \"2fbb0d97-cbdf-4c82-bf2e-69f422c76813\" (UID: \"2fbb0d97-cbdf-4c82-bf2e-69f422c76813\") " Dec 11 22:05:45 crc kubenswrapper[4956]: I1211 22:05:45.367707 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8g4jq\" (UniqueName: \"kubernetes.io/projected/2fbb0d97-cbdf-4c82-bf2e-69f422c76813-kube-api-access-8g4jq\") pod \"2fbb0d97-cbdf-4c82-bf2e-69f422c76813\" (UID: \"2fbb0d97-cbdf-4c82-bf2e-69f422c76813\") " Dec 11 22:05:45 crc kubenswrapper[4956]: I1211 22:05:45.367978 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fbb0d97-cbdf-4c82-bf2e-69f422c76813-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2fbb0d97-cbdf-4c82-bf2e-69f422c76813" (UID: "2fbb0d97-cbdf-4c82-bf2e-69f422c76813"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 22:05:45 crc kubenswrapper[4956]: I1211 22:05:45.368221 4956 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b3573b9e-0775-4bee-bb5f-df5a7c4f62fe-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 22:05:45 crc kubenswrapper[4956]: I1211 22:05:45.368252 4956 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2fbb0d97-cbdf-4c82-bf2e-69f422c76813-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 22:05:45 crc kubenswrapper[4956]: I1211 22:05:45.368270 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r7swb\" (UniqueName: \"kubernetes.io/projected/b3573b9e-0775-4bee-bb5f-df5a7c4f62fe-kube-api-access-r7swb\") on node \"crc\" DevicePath \"\"" Dec 11 22:05:45 crc kubenswrapper[4956]: I1211 22:05:45.373038 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fbb0d97-cbdf-4c82-bf2e-69f422c76813-kube-api-access-8g4jq" (OuterVolumeSpecName: "kube-api-access-8g4jq") pod "2fbb0d97-cbdf-4c82-bf2e-69f422c76813" (UID: "2fbb0d97-cbdf-4c82-bf2e-69f422c76813"). InnerVolumeSpecName "kube-api-access-8g4jq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:05:45 crc kubenswrapper[4956]: I1211 22:05:45.469932 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8g4jq\" (UniqueName: \"kubernetes.io/projected/2fbb0d97-cbdf-4c82-bf2e-69f422c76813-kube-api-access-8g4jq\") on node \"crc\" DevicePath \"\"" Dec 11 22:05:45 crc kubenswrapper[4956]: I1211 22:05:45.844711 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-6e56-account-create-update-7csrp" event={"ID":"b3573b9e-0775-4bee-bb5f-df5a7c4f62fe","Type":"ContainerDied","Data":"8a1e2d654f90d0f4d27ae32f2b374edbe271c79b0435ea3a88aa190aea45589d"} Dec 11 22:05:45 crc kubenswrapper[4956]: I1211 22:05:45.844742 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-6e56-account-create-update-7csrp" Dec 11 22:05:45 crc kubenswrapper[4956]: I1211 22:05:45.844801 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a1e2d654f90d0f4d27ae32f2b374edbe271c79b0435ea3a88aa190aea45589d" Dec 11 22:05:45 crc kubenswrapper[4956]: I1211 22:05:45.846126 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-db-create-dz86p" event={"ID":"2fbb0d97-cbdf-4c82-bf2e-69f422c76813","Type":"ContainerDied","Data":"b725c47f047b58d6a0f00883bdd83c4139370c1eb1d2648e321fbfd28a0718ff"} Dec 11 22:05:45 crc kubenswrapper[4956]: I1211 22:05:45.846176 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b725c47f047b58d6a0f00883bdd83c4139370c1eb1d2648e321fbfd28a0718ff" Dec 11 22:05:45 crc kubenswrapper[4956]: I1211 22:05:45.846185 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-db-create-dz86p" Dec 11 22:05:46 crc kubenswrapper[4956]: I1211 22:05:46.888355 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 22:05:46 crc kubenswrapper[4956]: I1211 22:05:46.898249 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 22:05:47 crc kubenswrapper[4956]: I1211 22:05:47.659228 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/barbican-operator-index-x9bqj"] Dec 11 22:05:48 crc kubenswrapper[4956]: I1211 22:05:48.267752 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-index-2mpd9"] Dec 11 22:05:48 crc kubenswrapper[4956]: E1211 22:05:48.269187 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3573b9e-0775-4bee-bb5f-df5a7c4f62fe" containerName="mariadb-account-create-update" Dec 11 22:05:48 crc kubenswrapper[4956]: I1211 22:05:48.269291 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3573b9e-0775-4bee-bb5f-df5a7c4f62fe" containerName="mariadb-account-create-update" Dec 11 22:05:48 crc kubenswrapper[4956]: E1211 22:05:48.269405 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fbb0d97-cbdf-4c82-bf2e-69f422c76813" containerName="mariadb-database-create" Dec 11 22:05:48 crc kubenswrapper[4956]: I1211 22:05:48.269538 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fbb0d97-cbdf-4c82-bf2e-69f422c76813" containerName="mariadb-database-create" Dec 11 22:05:48 crc kubenswrapper[4956]: I1211 22:05:48.269941 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3573b9e-0775-4bee-bb5f-df5a7c4f62fe" containerName="mariadb-account-create-update" Dec 11 22:05:48 crc kubenswrapper[4956]: I1211 22:05:48.270069 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fbb0d97-cbdf-4c82-bf2e-69f422c76813" containerName="mariadb-database-create" Dec 11 22:05:48 crc kubenswrapper[4956]: I1211 22:05:48.270806 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-index-2mpd9" Dec 11 22:05:48 crc kubenswrapper[4956]: I1211 22:05:48.284017 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-index-2mpd9"] Dec 11 22:05:48 crc kubenswrapper[4956]: I1211 22:05:48.423359 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jw5kh\" (UniqueName: \"kubernetes.io/projected/5f81ac27-4de7-4fec-bd58-51936767a898-kube-api-access-jw5kh\") pod \"barbican-operator-index-2mpd9\" (UID: \"5f81ac27-4de7-4fec-bd58-51936767a898\") " pod="openstack-operators/barbican-operator-index-2mpd9" Dec 11 22:05:48 crc kubenswrapper[4956]: I1211 22:05:48.524942 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jw5kh\" (UniqueName: \"kubernetes.io/projected/5f81ac27-4de7-4fec-bd58-51936767a898-kube-api-access-jw5kh\") pod \"barbican-operator-index-2mpd9\" (UID: \"5f81ac27-4de7-4fec-bd58-51936767a898\") " pod="openstack-operators/barbican-operator-index-2mpd9" Dec 11 22:05:48 crc kubenswrapper[4956]: I1211 22:05:48.543996 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jw5kh\" (UniqueName: \"kubernetes.io/projected/5f81ac27-4de7-4fec-bd58-51936767a898-kube-api-access-jw5kh\") pod \"barbican-operator-index-2mpd9\" (UID: \"5f81ac27-4de7-4fec-bd58-51936767a898\") " pod="openstack-operators/barbican-operator-index-2mpd9" Dec 11 22:05:48 crc kubenswrapper[4956]: I1211 22:05:48.607175 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-index-2mpd9" Dec 11 22:05:49 crc kubenswrapper[4956]: I1211 22:05:49.243981 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-index-2mpd9"] Dec 11 22:05:49 crc kubenswrapper[4956]: W1211 22:05:49.251868 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5f81ac27_4de7_4fec_bd58_51936767a898.slice/crio-1485a5fc735b75d78259bc40e05115259fdcfa29c9fae6a6bfbf937cc036c96e WatchSource:0}: Error finding container 1485a5fc735b75d78259bc40e05115259fdcfa29c9fae6a6bfbf937cc036c96e: Status 404 returned error can't find the container with id 1485a5fc735b75d78259bc40e05115259fdcfa29c9fae6a6bfbf937cc036c96e Dec 11 22:05:49 crc kubenswrapper[4956]: I1211 22:05:49.881103 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-index-x9bqj" event={"ID":"6c5d5efc-26a4-46e9-981b-1e9a6f134fda","Type":"ContainerStarted","Data":"79beb56297bed64bb06856c31ba958dcb685edf028a0a5d28a7cd5d5e2484a98"} Dec 11 22:05:49 crc kubenswrapper[4956]: I1211 22:05:49.881188 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/barbican-operator-index-x9bqj" podUID="6c5d5efc-26a4-46e9-981b-1e9a6f134fda" containerName="registry-server" containerID="cri-o://79beb56297bed64bb06856c31ba958dcb685edf028a0a5d28a7cd5d5e2484a98" gracePeriod=2 Dec 11 22:05:49 crc kubenswrapper[4956]: I1211 22:05:49.882092 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-index-2mpd9" event={"ID":"5f81ac27-4de7-4fec-bd58-51936767a898","Type":"ContainerStarted","Data":"1485a5fc735b75d78259bc40e05115259fdcfa29c9fae6a6bfbf937cc036c96e"} Dec 11 22:05:49 crc kubenswrapper[4956]: I1211 22:05:49.934413 4956 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack-operators/barbican-operator-index-x9bqj" podStartSLOduration=2.435175392 podStartE2EDuration="6.93438137s" podCreationTimestamp="2025-12-11 22:05:43 +0000 UTC" firstStartedPulling="2025-12-11 22:05:44.422198895 +0000 UTC m=+1036.866577055" lastFinishedPulling="2025-12-11 22:05:48.921404883 +0000 UTC m=+1041.365783033" observedRunningTime="2025-12-11 22:05:49.930937248 +0000 UTC m=+1042.375315398" watchObservedRunningTime="2025-12-11 22:05:49.93438137 +0000 UTC m=+1042.378759520" Dec 11 22:05:50 crc kubenswrapper[4956]: I1211 22:05:50.890761 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-index-2mpd9" event={"ID":"5f81ac27-4de7-4fec-bd58-51936767a898","Type":"ContainerStarted","Data":"417a780819e6988e87cd1a821205fd86b66cdeb206f1762851494f12c889fcb7"} Dec 11 22:05:50 crc kubenswrapper[4956]: I1211 22:05:50.894032 4956 generic.go:334] "Generic (PLEG): container finished" podID="6c5d5efc-26a4-46e9-981b-1e9a6f134fda" containerID="79beb56297bed64bb06856c31ba958dcb685edf028a0a5d28a7cd5d5e2484a98" exitCode=0 Dec 11 22:05:50 crc kubenswrapper[4956]: I1211 22:05:50.894072 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-index-x9bqj" event={"ID":"6c5d5efc-26a4-46e9-981b-1e9a6f134fda","Type":"ContainerDied","Data":"79beb56297bed64bb06856c31ba958dcb685edf028a0a5d28a7cd5d5e2484a98"} Dec 11 22:05:50 crc kubenswrapper[4956]: I1211 22:05:50.913427 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-index-2mpd9" podStartSLOduration=1.9862671870000002 podStartE2EDuration="2.913407278s" podCreationTimestamp="2025-12-11 22:05:48 +0000 UTC" firstStartedPulling="2025-12-11 22:05:49.257605086 +0000 UTC m=+1041.701983236" lastFinishedPulling="2025-12-11 22:05:50.184745167 +0000 UTC m=+1042.629123327" observedRunningTime="2025-12-11 22:05:50.910711155 +0000 UTC m=+1043.355089305" watchObservedRunningTime="2025-12-11 22:05:50.913407278 +0000 UTC m=+1043.357785428" Dec 11 22:05:50 crc kubenswrapper[4956]: I1211 22:05:50.964886 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-index-x9bqj" Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.094929 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wzd89\" (UniqueName: \"kubernetes.io/projected/6c5d5efc-26a4-46e9-981b-1e9a6f134fda-kube-api-access-wzd89\") pod \"6c5d5efc-26a4-46e9-981b-1e9a6f134fda\" (UID: \"6c5d5efc-26a4-46e9-981b-1e9a6f134fda\") " Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.098431 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/keystone-db-sync-kws2f"] Dec 11 22:05:51 crc kubenswrapper[4956]: E1211 22:05:51.098701 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c5d5efc-26a4-46e9-981b-1e9a6f134fda" containerName="registry-server" Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.098722 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c5d5efc-26a4-46e9-981b-1e9a6f134fda" containerName="registry-server" Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.098909 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c5d5efc-26a4-46e9-981b-1e9a6f134fda" containerName="registry-server" Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.099410 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-db-sync-kws2f" Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.103206 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-config-data" Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.103389 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-scripts" Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.103563 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-keystone-dockercfg-rs76m" Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.104831 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c5d5efc-26a4-46e9-981b-1e9a6f134fda-kube-api-access-wzd89" (OuterVolumeSpecName: "kube-api-access-wzd89") pod "6c5d5efc-26a4-46e9-981b-1e9a6f134fda" (UID: "6c5d5efc-26a4-46e9-981b-1e9a6f134fda"). InnerVolumeSpecName "kube-api-access-wzd89". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.109128 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-db-sync-kws2f"] Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.113969 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone" Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.196329 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fbd3dcf-e490-45e0-b94d-ab44f70e176e-config-data\") pod \"keystone-db-sync-kws2f\" (UID: \"2fbd3dcf-e490-45e0-b94d-ab44f70e176e\") " pod="swift-kuttl-tests/keystone-db-sync-kws2f" Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.196477 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c78td\" (UniqueName: \"kubernetes.io/projected/2fbd3dcf-e490-45e0-b94d-ab44f70e176e-kube-api-access-c78td\") pod \"keystone-db-sync-kws2f\" (UID: \"2fbd3dcf-e490-45e0-b94d-ab44f70e176e\") " pod="swift-kuttl-tests/keystone-db-sync-kws2f" Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.196578 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wzd89\" (UniqueName: \"kubernetes.io/projected/6c5d5efc-26a4-46e9-981b-1e9a6f134fda-kube-api-access-wzd89\") on node \"crc\" DevicePath \"\"" Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.298164 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fbd3dcf-e490-45e0-b94d-ab44f70e176e-config-data\") pod \"keystone-db-sync-kws2f\" (UID: \"2fbd3dcf-e490-45e0-b94d-ab44f70e176e\") " pod="swift-kuttl-tests/keystone-db-sync-kws2f" Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.298226 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c78td\" (UniqueName: \"kubernetes.io/projected/2fbd3dcf-e490-45e0-b94d-ab44f70e176e-kube-api-access-c78td\") pod \"keystone-db-sync-kws2f\" (UID: \"2fbd3dcf-e490-45e0-b94d-ab44f70e176e\") " pod="swift-kuttl-tests/keystone-db-sync-kws2f" Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.305012 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fbd3dcf-e490-45e0-b94d-ab44f70e176e-config-data\") pod \"keystone-db-sync-kws2f\" (UID: 
\"2fbd3dcf-e490-45e0-b94d-ab44f70e176e\") " pod="swift-kuttl-tests/keystone-db-sync-kws2f" Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.320307 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c78td\" (UniqueName: \"kubernetes.io/projected/2fbd3dcf-e490-45e0-b94d-ab44f70e176e-kube-api-access-c78td\") pod \"keystone-db-sync-kws2f\" (UID: \"2fbd3dcf-e490-45e0-b94d-ab44f70e176e\") " pod="swift-kuttl-tests/keystone-db-sync-kws2f" Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.438837 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-db-sync-kws2f" Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.874822 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-db-sync-kws2f"] Dec 11 22:05:51 crc kubenswrapper[4956]: W1211 22:05:51.878008 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2fbd3dcf_e490_45e0_b94d_ab44f70e176e.slice/crio-13107eefd4dbda2f9b83a620f1c016c5f40a9ab33c0426f38af23f5fba93162c WatchSource:0}: Error finding container 13107eefd4dbda2f9b83a620f1c016c5f40a9ab33c0426f38af23f5fba93162c: Status 404 returned error can't find the container with id 13107eefd4dbda2f9b83a620f1c016c5f40a9ab33c0426f38af23f5fba93162c Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.902220 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-index-x9bqj" Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.902895 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-index-x9bqj" event={"ID":"6c5d5efc-26a4-46e9-981b-1e9a6f134fda","Type":"ContainerDied","Data":"766dd1f2211d612429ed27ed356b100056aa65ddee9909d1f5a869d6e54f037a"} Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.902933 4956 scope.go:117] "RemoveContainer" containerID="79beb56297bed64bb06856c31ba958dcb685edf028a0a5d28a7cd5d5e2484a98" Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.904588 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-db-sync-kws2f" event={"ID":"2fbd3dcf-e490-45e0-b94d-ab44f70e176e","Type":"ContainerStarted","Data":"13107eefd4dbda2f9b83a620f1c016c5f40a9ab33c0426f38af23f5fba93162c"} Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.937848 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/barbican-operator-index-x9bqj"] Dec 11 22:05:51 crc kubenswrapper[4956]: I1211 22:05:51.943514 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/barbican-operator-index-x9bqj"] Dec 11 22:05:52 crc kubenswrapper[4956]: I1211 22:05:52.031387 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c5d5efc-26a4-46e9-981b-1e9a6f134fda" path="/var/lib/kubelet/pods/6c5d5efc-26a4-46e9-981b-1e9a6f134fda/volumes" Dec 11 22:05:58 crc kubenswrapper[4956]: I1211 22:05:58.630434 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/barbican-operator-index-2mpd9" Dec 11 22:05:58 crc kubenswrapper[4956]: I1211 22:05:58.631127 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-index-2mpd9" Dec 11 22:05:58 crc kubenswrapper[4956]: I1211 22:05:58.667940 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack-operators/barbican-operator-index-2mpd9" Dec 11 22:05:58 crc kubenswrapper[4956]: I1211 22:05:58.997186 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-index-2mpd9" Dec 11 22:06:01 crc kubenswrapper[4956]: I1211 22:06:01.519980 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr"] Dec 11 22:06:01 crc kubenswrapper[4956]: I1211 22:06:01.522257 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr" Dec 11 22:06:01 crc kubenswrapper[4956]: I1211 22:06:01.526388 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-7p6h2" Dec 11 22:06:01 crc kubenswrapper[4956]: I1211 22:06:01.531022 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr"] Dec 11 22:06:01 crc kubenswrapper[4956]: I1211 22:06:01.572750 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/868cfe57-acca-413d-a5fc-3c856d30ac3f-bundle\") pod \"43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr\" (UID: \"868cfe57-acca-413d-a5fc-3c856d30ac3f\") " pod="openstack-operators/43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr" Dec 11 22:06:01 crc kubenswrapper[4956]: I1211 22:06:01.573100 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/868cfe57-acca-413d-a5fc-3c856d30ac3f-util\") pod \"43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr\" (UID: \"868cfe57-acca-413d-a5fc-3c856d30ac3f\") " pod="openstack-operators/43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr" Dec 11 22:06:01 crc kubenswrapper[4956]: I1211 22:06:01.573130 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bw5th\" (UniqueName: \"kubernetes.io/projected/868cfe57-acca-413d-a5fc-3c856d30ac3f-kube-api-access-bw5th\") pod \"43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr\" (UID: \"868cfe57-acca-413d-a5fc-3c856d30ac3f\") " pod="openstack-operators/43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr" Dec 11 22:06:01 crc kubenswrapper[4956]: I1211 22:06:01.674371 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/868cfe57-acca-413d-a5fc-3c856d30ac3f-bundle\") pod \"43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr\" (UID: \"868cfe57-acca-413d-a5fc-3c856d30ac3f\") " pod="openstack-operators/43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr" Dec 11 22:06:01 crc kubenswrapper[4956]: I1211 22:06:01.674436 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/868cfe57-acca-413d-a5fc-3c856d30ac3f-util\") pod \"43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr\" (UID: \"868cfe57-acca-413d-a5fc-3c856d30ac3f\") " pod="openstack-operators/43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr" Dec 11 22:06:01 crc kubenswrapper[4956]: I1211 22:06:01.674476 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-bw5th\" (UniqueName: \"kubernetes.io/projected/868cfe57-acca-413d-a5fc-3c856d30ac3f-kube-api-access-bw5th\") pod \"43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr\" (UID: \"868cfe57-acca-413d-a5fc-3c856d30ac3f\") " pod="openstack-operators/43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr" Dec 11 22:06:01 crc kubenswrapper[4956]: I1211 22:06:01.675464 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/868cfe57-acca-413d-a5fc-3c856d30ac3f-bundle\") pod \"43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr\" (UID: \"868cfe57-acca-413d-a5fc-3c856d30ac3f\") " pod="openstack-operators/43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr" Dec 11 22:06:01 crc kubenswrapper[4956]: I1211 22:06:01.675732 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/868cfe57-acca-413d-a5fc-3c856d30ac3f-util\") pod \"43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr\" (UID: \"868cfe57-acca-413d-a5fc-3c856d30ac3f\") " pod="openstack-operators/43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr" Dec 11 22:06:01 crc kubenswrapper[4956]: I1211 22:06:01.705939 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bw5th\" (UniqueName: \"kubernetes.io/projected/868cfe57-acca-413d-a5fc-3c856d30ac3f-kube-api-access-bw5th\") pod \"43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr\" (UID: \"868cfe57-acca-413d-a5fc-3c856d30ac3f\") " pod="openstack-operators/43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr" Dec 11 22:06:01 crc kubenswrapper[4956]: I1211 22:06:01.844076 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr" Dec 11 22:06:04 crc kubenswrapper[4956]: E1211 22:06:04.062429 4956 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-keystone:current-podified" Dec 11 22:06:04 crc kubenswrapper[4956]: E1211 22:06:04.063578 4956 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:keystone-db-sync,Image:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,Command:[/bin/bash],Args:[-c keystone-manage db_sync],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/keystone/keystone.conf,SubPath:keystone.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-c78td,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42425,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42425,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-db-sync-kws2f_swift-kuttl-tests(2fbd3dcf-e490-45e0-b94d-ab44f70e176e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 22:06:04 crc kubenswrapper[4956]: E1211 22:06:04.064822 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"keystone-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="swift-kuttl-tests/keystone-db-sync-kws2f" podUID="2fbd3dcf-e490-45e0-b94d-ab44f70e176e" Dec 11 22:06:04 crc kubenswrapper[4956]: I1211 22:06:04.451288 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr"] Dec 11 22:06:05 crc kubenswrapper[4956]: I1211 22:06:05.037653 4956 generic.go:334] "Generic (PLEG): container finished" podID="868cfe57-acca-413d-a5fc-3c856d30ac3f" containerID="9dc9b549d0d0efcf44c6554c21bb25f9903ce96818bb9ff6bc13b9278aa998bf" exitCode=0 Dec 11 22:06:05 crc kubenswrapper[4956]: I1211 22:06:05.037697 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr" event={"ID":"868cfe57-acca-413d-a5fc-3c856d30ac3f","Type":"ContainerDied","Data":"9dc9b549d0d0efcf44c6554c21bb25f9903ce96818bb9ff6bc13b9278aa998bf"} Dec 11 22:06:05 crc kubenswrapper[4956]: I1211 22:06:05.037735 4956 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr" event={"ID":"868cfe57-acca-413d-a5fc-3c856d30ac3f","Type":"ContainerStarted","Data":"6768cff662aba29fcd2a4f8fc8d14e6b7c7dae849c4d9b0dd15264ea6dc15b9b"} Dec 11 22:06:05 crc kubenswrapper[4956]: E1211 22:06:05.039091 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"keystone-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-keystone:current-podified\\\"\"" pod="swift-kuttl-tests/keystone-db-sync-kws2f" podUID="2fbd3dcf-e490-45e0-b94d-ab44f70e176e" Dec 11 22:06:07 crc kubenswrapper[4956]: I1211 22:06:07.137333 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr" event={"ID":"868cfe57-acca-413d-a5fc-3c856d30ac3f","Type":"ContainerStarted","Data":"fe839f875c55aca58acb62a1033ff15fc619104ce5ac6dc4adf67d652dee952b"} Dec 11 22:06:09 crc kubenswrapper[4956]: I1211 22:06:09.154696 4956 generic.go:334] "Generic (PLEG): container finished" podID="868cfe57-acca-413d-a5fc-3c856d30ac3f" containerID="fe839f875c55aca58acb62a1033ff15fc619104ce5ac6dc4adf67d652dee952b" exitCode=0 Dec 11 22:06:09 crc kubenswrapper[4956]: I1211 22:06:09.154836 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr" event={"ID":"868cfe57-acca-413d-a5fc-3c856d30ac3f","Type":"ContainerDied","Data":"fe839f875c55aca58acb62a1033ff15fc619104ce5ac6dc4adf67d652dee952b"} Dec 11 22:06:10 crc kubenswrapper[4956]: I1211 22:06:10.167942 4956 generic.go:334] "Generic (PLEG): container finished" podID="868cfe57-acca-413d-a5fc-3c856d30ac3f" containerID="ea91327609913ecf3475372cca4c8289474d8d388efc03a273e897d8cb5d6993" exitCode=0 Dec 11 22:06:10 crc kubenswrapper[4956]: I1211 22:06:10.167993 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr" event={"ID":"868cfe57-acca-413d-a5fc-3c856d30ac3f","Type":"ContainerDied","Data":"ea91327609913ecf3475372cca4c8289474d8d388efc03a273e897d8cb5d6993"} Dec 11 22:06:11 crc kubenswrapper[4956]: I1211 22:06:11.456963 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr" Dec 11 22:06:11 crc kubenswrapper[4956]: I1211 22:06:11.585548 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/868cfe57-acca-413d-a5fc-3c856d30ac3f-util\") pod \"868cfe57-acca-413d-a5fc-3c856d30ac3f\" (UID: \"868cfe57-acca-413d-a5fc-3c856d30ac3f\") " Dec 11 22:06:11 crc kubenswrapper[4956]: I1211 22:06:11.585642 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/868cfe57-acca-413d-a5fc-3c856d30ac3f-bundle\") pod \"868cfe57-acca-413d-a5fc-3c856d30ac3f\" (UID: \"868cfe57-acca-413d-a5fc-3c856d30ac3f\") " Dec 11 22:06:11 crc kubenswrapper[4956]: I1211 22:06:11.585716 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bw5th\" (UniqueName: \"kubernetes.io/projected/868cfe57-acca-413d-a5fc-3c856d30ac3f-kube-api-access-bw5th\") pod \"868cfe57-acca-413d-a5fc-3c856d30ac3f\" (UID: \"868cfe57-acca-413d-a5fc-3c856d30ac3f\") " Dec 11 22:06:11 crc kubenswrapper[4956]: I1211 22:06:11.587048 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/868cfe57-acca-413d-a5fc-3c856d30ac3f-bundle" (OuterVolumeSpecName: "bundle") pod "868cfe57-acca-413d-a5fc-3c856d30ac3f" (UID: "868cfe57-acca-413d-a5fc-3c856d30ac3f"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:06:11 crc kubenswrapper[4956]: I1211 22:06:11.591206 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/868cfe57-acca-413d-a5fc-3c856d30ac3f-kube-api-access-bw5th" (OuterVolumeSpecName: "kube-api-access-bw5th") pod "868cfe57-acca-413d-a5fc-3c856d30ac3f" (UID: "868cfe57-acca-413d-a5fc-3c856d30ac3f"). InnerVolumeSpecName "kube-api-access-bw5th". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:06:11 crc kubenswrapper[4956]: I1211 22:06:11.599913 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/868cfe57-acca-413d-a5fc-3c856d30ac3f-util" (OuterVolumeSpecName: "util") pod "868cfe57-acca-413d-a5fc-3c856d30ac3f" (UID: "868cfe57-acca-413d-a5fc-3c856d30ac3f"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:06:11 crc kubenswrapper[4956]: I1211 22:06:11.687339 4956 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/868cfe57-acca-413d-a5fc-3c856d30ac3f-util\") on node \"crc\" DevicePath \"\"" Dec 11 22:06:11 crc kubenswrapper[4956]: I1211 22:06:11.687375 4956 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/868cfe57-acca-413d-a5fc-3c856d30ac3f-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 22:06:11 crc kubenswrapper[4956]: I1211 22:06:11.687388 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bw5th\" (UniqueName: \"kubernetes.io/projected/868cfe57-acca-413d-a5fc-3c856d30ac3f-kube-api-access-bw5th\") on node \"crc\" DevicePath \"\"" Dec 11 22:06:12 crc kubenswrapper[4956]: I1211 22:06:12.185225 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr" event={"ID":"868cfe57-acca-413d-a5fc-3c856d30ac3f","Type":"ContainerDied","Data":"6768cff662aba29fcd2a4f8fc8d14e6b7c7dae849c4d9b0dd15264ea6dc15b9b"} Dec 11 22:06:12 crc kubenswrapper[4956]: I1211 22:06:12.185273 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6768cff662aba29fcd2a4f8fc8d14e6b7c7dae849c4d9b0dd15264ea6dc15b9b" Dec 11 22:06:12 crc kubenswrapper[4956]: I1211 22:06:12.185331 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr" Dec 11 22:06:16 crc kubenswrapper[4956]: I1211 22:06:16.888284 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 22:06:16 crc kubenswrapper[4956]: I1211 22:06:16.888845 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 22:06:16 crc kubenswrapper[4956]: I1211 22:06:16.888890 4956 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" Dec 11 22:06:16 crc kubenswrapper[4956]: I1211 22:06:16.889498 4956 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"16eb3ed064bf8b2d4bc79eb3f1d7745450b60887fc5c6da806964966eb18a92c"} pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 22:06:16 crc kubenswrapper[4956]: I1211 22:06:16.889561 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" containerID="cri-o://16eb3ed064bf8b2d4bc79eb3f1d7745450b60887fc5c6da806964966eb18a92c" gracePeriod=600 Dec 11 22:06:19 crc kubenswrapper[4956]: I1211 22:06:19.877086 4956 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/barbican-operator-controller-manager-5879767bf-gh5wn"] Dec 11 22:06:19 crc kubenswrapper[4956]: E1211 22:06:19.877747 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="868cfe57-acca-413d-a5fc-3c856d30ac3f" containerName="util" Dec 11 22:06:19 crc kubenswrapper[4956]: I1211 22:06:19.877761 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="868cfe57-acca-413d-a5fc-3c856d30ac3f" containerName="util" Dec 11 22:06:19 crc kubenswrapper[4956]: E1211 22:06:19.877802 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="868cfe57-acca-413d-a5fc-3c856d30ac3f" containerName="extract" Dec 11 22:06:19 crc kubenswrapper[4956]: I1211 22:06:19.877810 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="868cfe57-acca-413d-a5fc-3c856d30ac3f" containerName="extract" Dec 11 22:06:19 crc kubenswrapper[4956]: E1211 22:06:19.877818 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="868cfe57-acca-413d-a5fc-3c856d30ac3f" containerName="pull" Dec 11 22:06:19 crc kubenswrapper[4956]: I1211 22:06:19.877823 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="868cfe57-acca-413d-a5fc-3c856d30ac3f" containerName="pull" Dec 11 22:06:19 crc kubenswrapper[4956]: I1211 22:06:19.877931 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="868cfe57-acca-413d-a5fc-3c856d30ac3f" containerName="extract" Dec 11 22:06:19 crc kubenswrapper[4956]: I1211 22:06:19.878360 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-5879767bf-gh5wn" Dec 11 22:06:19 crc kubenswrapper[4956]: I1211 22:06:19.881896 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-service-cert" Dec 11 22:06:19 crc kubenswrapper[4956]: I1211 22:06:19.882272 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-g9pqw" Dec 11 22:06:19 crc kubenswrapper[4956]: I1211 22:06:19.887816 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-5879767bf-gh5wn"] Dec 11 22:06:20 crc kubenswrapper[4956]: I1211 22:06:20.011110 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7a3464df-42e9-4fb4-94ae-fadc7acc42ea-webhook-cert\") pod \"barbican-operator-controller-manager-5879767bf-gh5wn\" (UID: \"7a3464df-42e9-4fb4-94ae-fadc7acc42ea\") " pod="openstack-operators/barbican-operator-controller-manager-5879767bf-gh5wn" Dec 11 22:06:20 crc kubenswrapper[4956]: I1211 22:06:20.011168 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rf7nk\" (UniqueName: \"kubernetes.io/projected/7a3464df-42e9-4fb4-94ae-fadc7acc42ea-kube-api-access-rf7nk\") pod \"barbican-operator-controller-manager-5879767bf-gh5wn\" (UID: \"7a3464df-42e9-4fb4-94ae-fadc7acc42ea\") " pod="openstack-operators/barbican-operator-controller-manager-5879767bf-gh5wn" Dec 11 22:06:20 crc kubenswrapper[4956]: I1211 22:06:20.011274 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7a3464df-42e9-4fb4-94ae-fadc7acc42ea-apiservice-cert\") pod \"barbican-operator-controller-manager-5879767bf-gh5wn\" (UID: 
\"7a3464df-42e9-4fb4-94ae-fadc7acc42ea\") " pod="openstack-operators/barbican-operator-controller-manager-5879767bf-gh5wn" Dec 11 22:06:20 crc kubenswrapper[4956]: I1211 22:06:20.112133 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7a3464df-42e9-4fb4-94ae-fadc7acc42ea-apiservice-cert\") pod \"barbican-operator-controller-manager-5879767bf-gh5wn\" (UID: \"7a3464df-42e9-4fb4-94ae-fadc7acc42ea\") " pod="openstack-operators/barbican-operator-controller-manager-5879767bf-gh5wn" Dec 11 22:06:20 crc kubenswrapper[4956]: I1211 22:06:20.112241 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7a3464df-42e9-4fb4-94ae-fadc7acc42ea-webhook-cert\") pod \"barbican-operator-controller-manager-5879767bf-gh5wn\" (UID: \"7a3464df-42e9-4fb4-94ae-fadc7acc42ea\") " pod="openstack-operators/barbican-operator-controller-manager-5879767bf-gh5wn" Dec 11 22:06:20 crc kubenswrapper[4956]: I1211 22:06:20.112276 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rf7nk\" (UniqueName: \"kubernetes.io/projected/7a3464df-42e9-4fb4-94ae-fadc7acc42ea-kube-api-access-rf7nk\") pod \"barbican-operator-controller-manager-5879767bf-gh5wn\" (UID: \"7a3464df-42e9-4fb4-94ae-fadc7acc42ea\") " pod="openstack-operators/barbican-operator-controller-manager-5879767bf-gh5wn" Dec 11 22:06:20 crc kubenswrapper[4956]: I1211 22:06:20.118536 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7a3464df-42e9-4fb4-94ae-fadc7acc42ea-apiservice-cert\") pod \"barbican-operator-controller-manager-5879767bf-gh5wn\" (UID: \"7a3464df-42e9-4fb4-94ae-fadc7acc42ea\") " pod="openstack-operators/barbican-operator-controller-manager-5879767bf-gh5wn" Dec 11 22:06:20 crc kubenswrapper[4956]: I1211 22:06:20.120192 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7a3464df-42e9-4fb4-94ae-fadc7acc42ea-webhook-cert\") pod \"barbican-operator-controller-manager-5879767bf-gh5wn\" (UID: \"7a3464df-42e9-4fb4-94ae-fadc7acc42ea\") " pod="openstack-operators/barbican-operator-controller-manager-5879767bf-gh5wn" Dec 11 22:06:20 crc kubenswrapper[4956]: I1211 22:06:20.131697 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rf7nk\" (UniqueName: \"kubernetes.io/projected/7a3464df-42e9-4fb4-94ae-fadc7acc42ea-kube-api-access-rf7nk\") pod \"barbican-operator-controller-manager-5879767bf-gh5wn\" (UID: \"7a3464df-42e9-4fb4-94ae-fadc7acc42ea\") " pod="openstack-operators/barbican-operator-controller-manager-5879767bf-gh5wn" Dec 11 22:06:20 crc kubenswrapper[4956]: I1211 22:06:20.193169 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-5879767bf-gh5wn" Dec 11 22:06:20 crc kubenswrapper[4956]: I1211 22:06:20.361390 4956 generic.go:334] "Generic (PLEG): container finished" podID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerID="16eb3ed064bf8b2d4bc79eb3f1d7745450b60887fc5c6da806964966eb18a92c" exitCode=0 Dec 11 22:06:20 crc kubenswrapper[4956]: I1211 22:06:20.361494 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" event={"ID":"cf61c63b-b06c-4f51-add2-aefe57de751a","Type":"ContainerDied","Data":"16eb3ed064bf8b2d4bc79eb3f1d7745450b60887fc5c6da806964966eb18a92c"} Dec 11 22:06:20 crc kubenswrapper[4956]: I1211 22:06:20.361810 4956 scope.go:117] "RemoveContainer" containerID="4499626ee92b2b1ce574f017b854a027fdb33d8effd0a947335164f75f9ce2f0" Dec 11 22:06:20 crc kubenswrapper[4956]: I1211 22:06:20.621840 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-5879767bf-gh5wn"] Dec 11 22:06:21 crc kubenswrapper[4956]: I1211 22:06:21.371418 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-5879767bf-gh5wn" event={"ID":"7a3464df-42e9-4fb4-94ae-fadc7acc42ea","Type":"ContainerStarted","Data":"985508df2acee937845fc0e754265c285e7f2a1fe447a8222c505f828578465b"} Dec 11 22:06:23 crc kubenswrapper[4956]: I1211 22:06:23.384053 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-db-sync-kws2f" event={"ID":"2fbd3dcf-e490-45e0-b94d-ab44f70e176e","Type":"ContainerStarted","Data":"dc4c1d73124ddb0bd856448e218e18144f13e29b839e858d2ea3365931b39379"} Dec 11 22:06:23 crc kubenswrapper[4956]: I1211 22:06:23.392850 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" event={"ID":"cf61c63b-b06c-4f51-add2-aefe57de751a","Type":"ContainerStarted","Data":"c6c779569135f522c95dbbde5c790dc03324f364d3461dab8d0acf4bad2ce223"} Dec 11 22:06:23 crc kubenswrapper[4956]: I1211 22:06:23.410975 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/keystone-db-sync-kws2f" podStartSLOduration=1.506015778 podStartE2EDuration="32.410954424s" podCreationTimestamp="2025-12-11 22:05:51 +0000 UTC" firstStartedPulling="2025-12-11 22:05:51.879560177 +0000 UTC m=+1044.323938327" lastFinishedPulling="2025-12-11 22:06:22.784498813 +0000 UTC m=+1075.228876973" observedRunningTime="2025-12-11 22:06:23.408252671 +0000 UTC m=+1075.852630841" watchObservedRunningTime="2025-12-11 22:06:23.410954424 +0000 UTC m=+1075.855332574" Dec 11 22:06:26 crc kubenswrapper[4956]: I1211 22:06:26.444081 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-5879767bf-gh5wn" event={"ID":"7a3464df-42e9-4fb4-94ae-fadc7acc42ea","Type":"ContainerStarted","Data":"d5faec0ebf2ce821bd6f6bda1060fa77d9e026b608eae3d29abb9802eb4842f7"} Dec 11 22:06:27 crc kubenswrapper[4956]: I1211 22:06:27.450350 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-5879767bf-gh5wn" Dec 11 22:06:32 crc kubenswrapper[4956]: E1211 22:06:32.798971 4956 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2fbd3dcf_e490_45e0_b94d_ab44f70e176e.slice/crio-conmon-dc4c1d73124ddb0bd856448e218e18144f13e29b839e858d2ea3365931b39379.scope\": RecentStats: unable to find data in memory cache]" Dec 11 22:06:33 crc kubenswrapper[4956]: I1211 22:06:33.495441 4956 generic.go:334] "Generic (PLEG): container finished" podID="2fbd3dcf-e490-45e0-b94d-ab44f70e176e" containerID="dc4c1d73124ddb0bd856448e218e18144f13e29b839e858d2ea3365931b39379" exitCode=0 Dec 11 22:06:33 crc kubenswrapper[4956]: I1211 22:06:33.495538 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-db-sync-kws2f" event={"ID":"2fbd3dcf-e490-45e0-b94d-ab44f70e176e","Type":"ContainerDied","Data":"dc4c1d73124ddb0bd856448e218e18144f13e29b839e858d2ea3365931b39379"} Dec 11 22:06:33 crc kubenswrapper[4956]: I1211 22:06:33.513672 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-5879767bf-gh5wn" podStartSLOduration=9.102344261 podStartE2EDuration="14.513653771s" podCreationTimestamp="2025-12-11 22:06:19 +0000 UTC" firstStartedPulling="2025-12-11 22:06:20.623906129 +0000 UTC m=+1073.068284279" lastFinishedPulling="2025-12-11 22:06:26.035215639 +0000 UTC m=+1078.479593789" observedRunningTime="2025-12-11 22:06:27.48810367 +0000 UTC m=+1079.932481820" watchObservedRunningTime="2025-12-11 22:06:33.513653771 +0000 UTC m=+1085.958031921" Dec 11 22:06:34 crc kubenswrapper[4956]: I1211 22:06:34.981563 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-db-sync-kws2f" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.166919 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c78td\" (UniqueName: \"kubernetes.io/projected/2fbd3dcf-e490-45e0-b94d-ab44f70e176e-kube-api-access-c78td\") pod \"2fbd3dcf-e490-45e0-b94d-ab44f70e176e\" (UID: \"2fbd3dcf-e490-45e0-b94d-ab44f70e176e\") " Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.167514 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fbd3dcf-e490-45e0-b94d-ab44f70e176e-config-data\") pod \"2fbd3dcf-e490-45e0-b94d-ab44f70e176e\" (UID: \"2fbd3dcf-e490-45e0-b94d-ab44f70e176e\") " Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.177043 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fbd3dcf-e490-45e0-b94d-ab44f70e176e-kube-api-access-c78td" (OuterVolumeSpecName: "kube-api-access-c78td") pod "2fbd3dcf-e490-45e0-b94d-ab44f70e176e" (UID: "2fbd3dcf-e490-45e0-b94d-ab44f70e176e"). InnerVolumeSpecName "kube-api-access-c78td". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.225407 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fbd3dcf-e490-45e0-b94d-ab44f70e176e-config-data" (OuterVolumeSpecName: "config-data") pod "2fbd3dcf-e490-45e0-b94d-ab44f70e176e" (UID: "2fbd3dcf-e490-45e0-b94d-ab44f70e176e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.269892 4956 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fbd3dcf-e490-45e0-b94d-ab44f70e176e-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.269947 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c78td\" (UniqueName: \"kubernetes.io/projected/2fbd3dcf-e490-45e0-b94d-ab44f70e176e-kube-api-access-c78td\") on node \"crc\" DevicePath \"\"" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.511850 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-db-sync-kws2f" event={"ID":"2fbd3dcf-e490-45e0-b94d-ab44f70e176e","Type":"ContainerDied","Data":"13107eefd4dbda2f9b83a620f1c016c5f40a9ab33c0426f38af23f5fba93162c"} Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.511891 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="13107eefd4dbda2f9b83a620f1c016c5f40a9ab33c0426f38af23f5fba93162c" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.511890 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-db-sync-kws2f" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.714459 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/keystone-bootstrap-p8nhm"] Dec 11 22:06:35 crc kubenswrapper[4956]: E1211 22:06:35.714702 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fbd3dcf-e490-45e0-b94d-ab44f70e176e" containerName="keystone-db-sync" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.714722 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fbd3dcf-e490-45e0-b94d-ab44f70e176e" containerName="keystone-db-sync" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.714886 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fbd3dcf-e490-45e0-b94d-ab44f70e176e" containerName="keystone-db-sync" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.715366 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-bootstrap-p8nhm" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.717095 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-keystone-dockercfg-rs76m" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.717167 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-scripts" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.722297 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.722538 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"osp-secret" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.723436 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-config-data" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.744308 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-bootstrap-p8nhm"] Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.878430 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ldz8\" (UniqueName: \"kubernetes.io/projected/17780bbd-36df-40ee-a35f-61f848e57120-kube-api-access-7ldz8\") pod \"keystone-bootstrap-p8nhm\" (UID: \"17780bbd-36df-40ee-a35f-61f848e57120\") " pod="swift-kuttl-tests/keystone-bootstrap-p8nhm" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.878505 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/17780bbd-36df-40ee-a35f-61f848e57120-fernet-keys\") pod \"keystone-bootstrap-p8nhm\" (UID: \"17780bbd-36df-40ee-a35f-61f848e57120\") " pod="swift-kuttl-tests/keystone-bootstrap-p8nhm" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.878541 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/17780bbd-36df-40ee-a35f-61f848e57120-credential-keys\") pod \"keystone-bootstrap-p8nhm\" (UID: \"17780bbd-36df-40ee-a35f-61f848e57120\") " pod="swift-kuttl-tests/keystone-bootstrap-p8nhm" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.878562 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17780bbd-36df-40ee-a35f-61f848e57120-config-data\") pod \"keystone-bootstrap-p8nhm\" (UID: \"17780bbd-36df-40ee-a35f-61f848e57120\") " pod="swift-kuttl-tests/keystone-bootstrap-p8nhm" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.878702 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17780bbd-36df-40ee-a35f-61f848e57120-scripts\") pod \"keystone-bootstrap-p8nhm\" (UID: \"17780bbd-36df-40ee-a35f-61f848e57120\") " pod="swift-kuttl-tests/keystone-bootstrap-p8nhm" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.981063 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ldz8\" (UniqueName: \"kubernetes.io/projected/17780bbd-36df-40ee-a35f-61f848e57120-kube-api-access-7ldz8\") pod \"keystone-bootstrap-p8nhm\" (UID: \"17780bbd-36df-40ee-a35f-61f848e57120\") " pod="swift-kuttl-tests/keystone-bootstrap-p8nhm" Dec 11 22:06:35 crc 
kubenswrapper[4956]: I1211 22:06:35.981184 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/17780bbd-36df-40ee-a35f-61f848e57120-fernet-keys\") pod \"keystone-bootstrap-p8nhm\" (UID: \"17780bbd-36df-40ee-a35f-61f848e57120\") " pod="swift-kuttl-tests/keystone-bootstrap-p8nhm" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.981225 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/17780bbd-36df-40ee-a35f-61f848e57120-credential-keys\") pod \"keystone-bootstrap-p8nhm\" (UID: \"17780bbd-36df-40ee-a35f-61f848e57120\") " pod="swift-kuttl-tests/keystone-bootstrap-p8nhm" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.981247 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17780bbd-36df-40ee-a35f-61f848e57120-config-data\") pod \"keystone-bootstrap-p8nhm\" (UID: \"17780bbd-36df-40ee-a35f-61f848e57120\") " pod="swift-kuttl-tests/keystone-bootstrap-p8nhm" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.981288 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17780bbd-36df-40ee-a35f-61f848e57120-scripts\") pod \"keystone-bootstrap-p8nhm\" (UID: \"17780bbd-36df-40ee-a35f-61f848e57120\") " pod="swift-kuttl-tests/keystone-bootstrap-p8nhm" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.985758 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/17780bbd-36df-40ee-a35f-61f848e57120-fernet-keys\") pod \"keystone-bootstrap-p8nhm\" (UID: \"17780bbd-36df-40ee-a35f-61f848e57120\") " pod="swift-kuttl-tests/keystone-bootstrap-p8nhm" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.986956 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17780bbd-36df-40ee-a35f-61f848e57120-scripts\") pod \"keystone-bootstrap-p8nhm\" (UID: \"17780bbd-36df-40ee-a35f-61f848e57120\") " pod="swift-kuttl-tests/keystone-bootstrap-p8nhm" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.988692 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/17780bbd-36df-40ee-a35f-61f848e57120-credential-keys\") pod \"keystone-bootstrap-p8nhm\" (UID: \"17780bbd-36df-40ee-a35f-61f848e57120\") " pod="swift-kuttl-tests/keystone-bootstrap-p8nhm" Dec 11 22:06:35 crc kubenswrapper[4956]: I1211 22:06:35.992562 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17780bbd-36df-40ee-a35f-61f848e57120-config-data\") pod \"keystone-bootstrap-p8nhm\" (UID: \"17780bbd-36df-40ee-a35f-61f848e57120\") " pod="swift-kuttl-tests/keystone-bootstrap-p8nhm" Dec 11 22:06:36 crc kubenswrapper[4956]: I1211 22:06:36.003700 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ldz8\" (UniqueName: \"kubernetes.io/projected/17780bbd-36df-40ee-a35f-61f848e57120-kube-api-access-7ldz8\") pod \"keystone-bootstrap-p8nhm\" (UID: \"17780bbd-36df-40ee-a35f-61f848e57120\") " pod="swift-kuttl-tests/keystone-bootstrap-p8nhm" Dec 11 22:06:36 crc kubenswrapper[4956]: I1211 22:06:36.036554 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-bootstrap-p8nhm" Dec 11 22:06:36 crc kubenswrapper[4956]: I1211 22:06:36.363012 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-bootstrap-p8nhm"] Dec 11 22:06:36 crc kubenswrapper[4956]: W1211 22:06:36.367330 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod17780bbd_36df_40ee_a35f_61f848e57120.slice/crio-d6b4c35adb370494df1a7e61f189c0d7705a53cc7dd740903a2e8cff328c9062 WatchSource:0}: Error finding container d6b4c35adb370494df1a7e61f189c0d7705a53cc7dd740903a2e8cff328c9062: Status 404 returned error can't find the container with id d6b4c35adb370494df1a7e61f189c0d7705a53cc7dd740903a2e8cff328c9062 Dec 11 22:06:36 crc kubenswrapper[4956]: I1211 22:06:36.519617 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-bootstrap-p8nhm" event={"ID":"17780bbd-36df-40ee-a35f-61f848e57120","Type":"ContainerStarted","Data":"d6b4c35adb370494df1a7e61f189c0d7705a53cc7dd740903a2e8cff328c9062"} Dec 11 22:06:37 crc kubenswrapper[4956]: I1211 22:06:37.539101 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-bootstrap-p8nhm" event={"ID":"17780bbd-36df-40ee-a35f-61f848e57120","Type":"ContainerStarted","Data":"f191445703ff927c9f6afe5eb9ae2407808a76ccbc7423240210401ff9163723"} Dec 11 22:06:37 crc kubenswrapper[4956]: I1211 22:06:37.559173 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/keystone-bootstrap-p8nhm" podStartSLOduration=2.559152369 podStartE2EDuration="2.559152369s" podCreationTimestamp="2025-12-11 22:06:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 22:06:37.556515858 +0000 UTC m=+1090.000894008" watchObservedRunningTime="2025-12-11 22:06:37.559152369 +0000 UTC m=+1090.003530529" Dec 11 22:06:40 crc kubenswrapper[4956]: I1211 22:06:40.201265 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-5879767bf-gh5wn" Dec 11 22:06:40 crc kubenswrapper[4956]: I1211 22:06:40.989208 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/barbican-db-create-lkhh8"] Dec 11 22:06:40 crc kubenswrapper[4956]: I1211 22:06:40.990195 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-db-create-lkhh8" Dec 11 22:06:41 crc kubenswrapper[4956]: I1211 22:06:41.012054 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-db-create-lkhh8"] Dec 11 22:06:41 crc kubenswrapper[4956]: I1211 22:06:41.022340 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/barbican-5eb0-account-create-update-dv7rc"] Dec 11 22:06:41 crc kubenswrapper[4956]: I1211 22:06:41.023357 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbican-5eb0-account-create-update-dv7rc" Dec 11 22:06:41 crc kubenswrapper[4956]: I1211 22:06:41.039325 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"barbican-db-secret" Dec 11 22:06:41 crc kubenswrapper[4956]: I1211 22:06:41.047261 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-5eb0-account-create-update-dv7rc"] Dec 11 22:06:41 crc kubenswrapper[4956]: I1211 22:06:41.083222 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/990facdf-1f75-4afc-b70b-464384b2c021-operator-scripts\") pod \"barbican-db-create-lkhh8\" (UID: \"990facdf-1f75-4afc-b70b-464384b2c021\") " pod="swift-kuttl-tests/barbican-db-create-lkhh8" Dec 11 22:06:41 crc kubenswrapper[4956]: I1211 22:06:41.083281 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g67nf\" (UniqueName: \"kubernetes.io/projected/990facdf-1f75-4afc-b70b-464384b2c021-kube-api-access-g67nf\") pod \"barbican-db-create-lkhh8\" (UID: \"990facdf-1f75-4afc-b70b-464384b2c021\") " pod="swift-kuttl-tests/barbican-db-create-lkhh8" Dec 11 22:06:41 crc kubenswrapper[4956]: I1211 22:06:41.184431 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbq7x\" (UniqueName: \"kubernetes.io/projected/5ffb504a-4c0b-483e-80b3-17c9ffd66385-kube-api-access-pbq7x\") pod \"barbican-5eb0-account-create-update-dv7rc\" (UID: \"5ffb504a-4c0b-483e-80b3-17c9ffd66385\") " pod="swift-kuttl-tests/barbican-5eb0-account-create-update-dv7rc" Dec 11 22:06:41 crc kubenswrapper[4956]: I1211 22:06:41.184481 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ffb504a-4c0b-483e-80b3-17c9ffd66385-operator-scripts\") pod \"barbican-5eb0-account-create-update-dv7rc\" (UID: \"5ffb504a-4c0b-483e-80b3-17c9ffd66385\") " pod="swift-kuttl-tests/barbican-5eb0-account-create-update-dv7rc" Dec 11 22:06:41 crc kubenswrapper[4956]: I1211 22:06:41.184573 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/990facdf-1f75-4afc-b70b-464384b2c021-operator-scripts\") pod \"barbican-db-create-lkhh8\" (UID: \"990facdf-1f75-4afc-b70b-464384b2c021\") " pod="swift-kuttl-tests/barbican-db-create-lkhh8" Dec 11 22:06:41 crc kubenswrapper[4956]: I1211 22:06:41.184604 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g67nf\" (UniqueName: \"kubernetes.io/projected/990facdf-1f75-4afc-b70b-464384b2c021-kube-api-access-g67nf\") pod \"barbican-db-create-lkhh8\" (UID: \"990facdf-1f75-4afc-b70b-464384b2c021\") " pod="swift-kuttl-tests/barbican-db-create-lkhh8" Dec 11 22:06:41 crc kubenswrapper[4956]: I1211 22:06:41.185731 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/990facdf-1f75-4afc-b70b-464384b2c021-operator-scripts\") pod \"barbican-db-create-lkhh8\" (UID: \"990facdf-1f75-4afc-b70b-464384b2c021\") " pod="swift-kuttl-tests/barbican-db-create-lkhh8" Dec 11 22:06:41 crc kubenswrapper[4956]: I1211 22:06:41.203591 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g67nf\" (UniqueName: 
\"kubernetes.io/projected/990facdf-1f75-4afc-b70b-464384b2c021-kube-api-access-g67nf\") pod \"barbican-db-create-lkhh8\" (UID: \"990facdf-1f75-4afc-b70b-464384b2c021\") " pod="swift-kuttl-tests/barbican-db-create-lkhh8" Dec 11 22:06:41 crc kubenswrapper[4956]: I1211 22:06:41.285470 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ffb504a-4c0b-483e-80b3-17c9ffd66385-operator-scripts\") pod \"barbican-5eb0-account-create-update-dv7rc\" (UID: \"5ffb504a-4c0b-483e-80b3-17c9ffd66385\") " pod="swift-kuttl-tests/barbican-5eb0-account-create-update-dv7rc" Dec 11 22:06:41 crc kubenswrapper[4956]: I1211 22:06:41.285879 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbq7x\" (UniqueName: \"kubernetes.io/projected/5ffb504a-4c0b-483e-80b3-17c9ffd66385-kube-api-access-pbq7x\") pod \"barbican-5eb0-account-create-update-dv7rc\" (UID: \"5ffb504a-4c0b-483e-80b3-17c9ffd66385\") " pod="swift-kuttl-tests/barbican-5eb0-account-create-update-dv7rc" Dec 11 22:06:41 crc kubenswrapper[4956]: I1211 22:06:41.286450 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ffb504a-4c0b-483e-80b3-17c9ffd66385-operator-scripts\") pod \"barbican-5eb0-account-create-update-dv7rc\" (UID: \"5ffb504a-4c0b-483e-80b3-17c9ffd66385\") " pod="swift-kuttl-tests/barbican-5eb0-account-create-update-dv7rc" Dec 11 22:06:41 crc kubenswrapper[4956]: I1211 22:06:41.305341 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbq7x\" (UniqueName: \"kubernetes.io/projected/5ffb504a-4c0b-483e-80b3-17c9ffd66385-kube-api-access-pbq7x\") pod \"barbican-5eb0-account-create-update-dv7rc\" (UID: \"5ffb504a-4c0b-483e-80b3-17c9ffd66385\") " pod="swift-kuttl-tests/barbican-5eb0-account-create-update-dv7rc" Dec 11 22:06:41 crc kubenswrapper[4956]: I1211 22:06:41.308026 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-db-create-lkhh8" Dec 11 22:06:41 crc kubenswrapper[4956]: I1211 22:06:41.340276 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbican-5eb0-account-create-update-dv7rc" Dec 11 22:06:42 crc kubenswrapper[4956]: I1211 22:06:42.355511 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-5eb0-account-create-update-dv7rc"] Dec 11 22:06:42 crc kubenswrapper[4956]: I1211 22:06:42.673580 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-db-create-lkhh8"] Dec 11 22:06:43 crc kubenswrapper[4956]: I1211 22:06:43.586169 4956 generic.go:334] "Generic (PLEG): container finished" podID="5ffb504a-4c0b-483e-80b3-17c9ffd66385" containerID="f4121f79478a96f02bbdb01651fb7765faa55944acaba4a9451411c1b547074c" exitCode=0 Dec 11 22:06:43 crc kubenswrapper[4956]: I1211 22:06:43.586310 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-5eb0-account-create-update-dv7rc" event={"ID":"5ffb504a-4c0b-483e-80b3-17c9ffd66385","Type":"ContainerDied","Data":"f4121f79478a96f02bbdb01651fb7765faa55944acaba4a9451411c1b547074c"} Dec 11 22:06:43 crc kubenswrapper[4956]: I1211 22:06:43.586673 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-5eb0-account-create-update-dv7rc" event={"ID":"5ffb504a-4c0b-483e-80b3-17c9ffd66385","Type":"ContainerStarted","Data":"843b28436c7b95c93ff02e0a1914d331cae14e5b42aed2bfd837bc0345cad372"} Dec 11 22:06:43 crc kubenswrapper[4956]: I1211 22:06:43.587895 4956 generic.go:334] "Generic (PLEG): container finished" podID="990facdf-1f75-4afc-b70b-464384b2c021" containerID="6ccd9c935f4174be89722490acdab4a95c0b75b7d85f7d38925db439146a88d1" exitCode=0 Dec 11 22:06:43 crc kubenswrapper[4956]: I1211 22:06:43.587925 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-db-create-lkhh8" event={"ID":"990facdf-1f75-4afc-b70b-464384b2c021","Type":"ContainerDied","Data":"6ccd9c935f4174be89722490acdab4a95c0b75b7d85f7d38925db439146a88d1"} Dec 11 22:06:43 crc kubenswrapper[4956]: I1211 22:06:43.587941 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-db-create-lkhh8" event={"ID":"990facdf-1f75-4afc-b70b-464384b2c021","Type":"ContainerStarted","Data":"b442727e319838ed9cd2336ed8a283cccc4917d3100144effe1aebe60bafae9b"} Dec 11 22:06:44 crc kubenswrapper[4956]: I1211 22:06:44.600757 4956 generic.go:334] "Generic (PLEG): container finished" podID="17780bbd-36df-40ee-a35f-61f848e57120" containerID="f191445703ff927c9f6afe5eb9ae2407808a76ccbc7423240210401ff9163723" exitCode=0 Dec 11 22:06:44 crc kubenswrapper[4956]: I1211 22:06:44.600850 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-bootstrap-p8nhm" event={"ID":"17780bbd-36df-40ee-a35f-61f848e57120","Type":"ContainerDied","Data":"f191445703ff927c9f6afe5eb9ae2407808a76ccbc7423240210401ff9163723"} Dec 11 22:06:45 crc kubenswrapper[4956]: I1211 22:06:45.054157 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-db-create-lkhh8" Dec 11 22:06:45 crc kubenswrapper[4956]: I1211 22:06:45.060544 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbican-5eb0-account-create-update-dv7rc" Dec 11 22:06:45 crc kubenswrapper[4956]: I1211 22:06:45.162420 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/990facdf-1f75-4afc-b70b-464384b2c021-operator-scripts\") pod \"990facdf-1f75-4afc-b70b-464384b2c021\" (UID: \"990facdf-1f75-4afc-b70b-464384b2c021\") " Dec 11 22:06:45 crc kubenswrapper[4956]: I1211 22:06:45.162944 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pbq7x\" (UniqueName: \"kubernetes.io/projected/5ffb504a-4c0b-483e-80b3-17c9ffd66385-kube-api-access-pbq7x\") pod \"5ffb504a-4c0b-483e-80b3-17c9ffd66385\" (UID: \"5ffb504a-4c0b-483e-80b3-17c9ffd66385\") " Dec 11 22:06:45 crc kubenswrapper[4956]: I1211 22:06:45.163234 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ffb504a-4c0b-483e-80b3-17c9ffd66385-operator-scripts\") pod \"5ffb504a-4c0b-483e-80b3-17c9ffd66385\" (UID: \"5ffb504a-4c0b-483e-80b3-17c9ffd66385\") " Dec 11 22:06:45 crc kubenswrapper[4956]: I1211 22:06:45.163479 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/990facdf-1f75-4afc-b70b-464384b2c021-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "990facdf-1f75-4afc-b70b-464384b2c021" (UID: "990facdf-1f75-4afc-b70b-464384b2c021"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 22:06:45 crc kubenswrapper[4956]: I1211 22:06:45.163740 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g67nf\" (UniqueName: \"kubernetes.io/projected/990facdf-1f75-4afc-b70b-464384b2c021-kube-api-access-g67nf\") pod \"990facdf-1f75-4afc-b70b-464384b2c021\" (UID: \"990facdf-1f75-4afc-b70b-464384b2c021\") " Dec 11 22:06:45 crc kubenswrapper[4956]: I1211 22:06:45.164347 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ffb504a-4c0b-483e-80b3-17c9ffd66385-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5ffb504a-4c0b-483e-80b3-17c9ffd66385" (UID: "5ffb504a-4c0b-483e-80b3-17c9ffd66385"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 22:06:45 crc kubenswrapper[4956]: I1211 22:06:45.164362 4956 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/990facdf-1f75-4afc-b70b-464384b2c021-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 22:06:45 crc kubenswrapper[4956]: I1211 22:06:45.175055 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ffb504a-4c0b-483e-80b3-17c9ffd66385-kube-api-access-pbq7x" (OuterVolumeSpecName: "kube-api-access-pbq7x") pod "5ffb504a-4c0b-483e-80b3-17c9ffd66385" (UID: "5ffb504a-4c0b-483e-80b3-17c9ffd66385"). InnerVolumeSpecName "kube-api-access-pbq7x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:06:45 crc kubenswrapper[4956]: I1211 22:06:45.175131 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/990facdf-1f75-4afc-b70b-464384b2c021-kube-api-access-g67nf" (OuterVolumeSpecName: "kube-api-access-g67nf") pod "990facdf-1f75-4afc-b70b-464384b2c021" (UID: "990facdf-1f75-4afc-b70b-464384b2c021"). 
InnerVolumeSpecName "kube-api-access-g67nf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:06:45 crc kubenswrapper[4956]: I1211 22:06:45.265350 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g67nf\" (UniqueName: \"kubernetes.io/projected/990facdf-1f75-4afc-b70b-464384b2c021-kube-api-access-g67nf\") on node \"crc\" DevicePath \"\"" Dec 11 22:06:45 crc kubenswrapper[4956]: I1211 22:06:45.265386 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pbq7x\" (UniqueName: \"kubernetes.io/projected/5ffb504a-4c0b-483e-80b3-17c9ffd66385-kube-api-access-pbq7x\") on node \"crc\" DevicePath \"\"" Dec 11 22:06:45 crc kubenswrapper[4956]: I1211 22:06:45.265395 4956 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ffb504a-4c0b-483e-80b3-17c9ffd66385-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 22:06:45 crc kubenswrapper[4956]: I1211 22:06:45.637566 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-db-create-lkhh8" event={"ID":"990facdf-1f75-4afc-b70b-464384b2c021","Type":"ContainerDied","Data":"b442727e319838ed9cd2336ed8a283cccc4917d3100144effe1aebe60bafae9b"} Dec 11 22:06:45 crc kubenswrapper[4956]: I1211 22:06:45.637863 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b442727e319838ed9cd2336ed8a283cccc4917d3100144effe1aebe60bafae9b" Dec 11 22:06:45 crc kubenswrapper[4956]: I1211 22:06:45.637583 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-db-create-lkhh8" Dec 11 22:06:45 crc kubenswrapper[4956]: I1211 22:06:45.639975 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-5eb0-account-create-update-dv7rc" event={"ID":"5ffb504a-4c0b-483e-80b3-17c9ffd66385","Type":"ContainerDied","Data":"843b28436c7b95c93ff02e0a1914d331cae14e5b42aed2bfd837bc0345cad372"} Dec 11 22:06:45 crc kubenswrapper[4956]: I1211 22:06:45.640042 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-5eb0-account-create-update-dv7rc" Dec 11 22:06:45 crc kubenswrapper[4956]: I1211 22:06:45.640045 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="843b28436c7b95c93ff02e0a1914d331cae14e5b42aed2bfd837bc0345cad372" Dec 11 22:06:45 crc kubenswrapper[4956]: I1211 22:06:45.978004 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-bootstrap-p8nhm" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.178009 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17780bbd-36df-40ee-a35f-61f848e57120-scripts\") pod \"17780bbd-36df-40ee-a35f-61f848e57120\" (UID: \"17780bbd-36df-40ee-a35f-61f848e57120\") " Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.178104 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17780bbd-36df-40ee-a35f-61f848e57120-config-data\") pod \"17780bbd-36df-40ee-a35f-61f848e57120\" (UID: \"17780bbd-36df-40ee-a35f-61f848e57120\") " Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.178170 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ldz8\" (UniqueName: \"kubernetes.io/projected/17780bbd-36df-40ee-a35f-61f848e57120-kube-api-access-7ldz8\") pod \"17780bbd-36df-40ee-a35f-61f848e57120\" (UID: \"17780bbd-36df-40ee-a35f-61f848e57120\") " Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.178254 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/17780bbd-36df-40ee-a35f-61f848e57120-credential-keys\") pod \"17780bbd-36df-40ee-a35f-61f848e57120\" (UID: \"17780bbd-36df-40ee-a35f-61f848e57120\") " Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.178286 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/17780bbd-36df-40ee-a35f-61f848e57120-fernet-keys\") pod \"17780bbd-36df-40ee-a35f-61f848e57120\" (UID: \"17780bbd-36df-40ee-a35f-61f848e57120\") " Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.182565 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17780bbd-36df-40ee-a35f-61f848e57120-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "17780bbd-36df-40ee-a35f-61f848e57120" (UID: "17780bbd-36df-40ee-a35f-61f848e57120"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.182616 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17780bbd-36df-40ee-a35f-61f848e57120-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "17780bbd-36df-40ee-a35f-61f848e57120" (UID: "17780bbd-36df-40ee-a35f-61f848e57120"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.183471 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17780bbd-36df-40ee-a35f-61f848e57120-kube-api-access-7ldz8" (OuterVolumeSpecName: "kube-api-access-7ldz8") pod "17780bbd-36df-40ee-a35f-61f848e57120" (UID: "17780bbd-36df-40ee-a35f-61f848e57120"). InnerVolumeSpecName "kube-api-access-7ldz8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.185834 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17780bbd-36df-40ee-a35f-61f848e57120-scripts" (OuterVolumeSpecName: "scripts") pod "17780bbd-36df-40ee-a35f-61f848e57120" (UID: "17780bbd-36df-40ee-a35f-61f848e57120"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.219199 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17780bbd-36df-40ee-a35f-61f848e57120-config-data" (OuterVolumeSpecName: "config-data") pod "17780bbd-36df-40ee-a35f-61f848e57120" (UID: "17780bbd-36df-40ee-a35f-61f848e57120"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.279478 4956 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17780bbd-36df-40ee-a35f-61f848e57120-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.279519 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ldz8\" (UniqueName: \"kubernetes.io/projected/17780bbd-36df-40ee-a35f-61f848e57120-kube-api-access-7ldz8\") on node \"crc\" DevicePath \"\"" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.279533 4956 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/17780bbd-36df-40ee-a35f-61f848e57120-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.279542 4956 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/17780bbd-36df-40ee-a35f-61f848e57120-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.279551 4956 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17780bbd-36df-40ee-a35f-61f848e57120-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.652220 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-bootstrap-p8nhm" event={"ID":"17780bbd-36df-40ee-a35f-61f848e57120","Type":"ContainerDied","Data":"d6b4c35adb370494df1a7e61f189c0d7705a53cc7dd740903a2e8cff328c9062"} Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.652265 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6b4c35adb370494df1a7e61f189c0d7705a53cc7dd740903a2e8cff328c9062" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.652278 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-bootstrap-p8nhm" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.713737 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/keystone-85866bffd9-vxw6k"] Dec 11 22:06:46 crc kubenswrapper[4956]: E1211 22:06:46.714195 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17780bbd-36df-40ee-a35f-61f848e57120" containerName="keystone-bootstrap" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.714218 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="17780bbd-36df-40ee-a35f-61f848e57120" containerName="keystone-bootstrap" Dec 11 22:06:46 crc kubenswrapper[4956]: E1211 22:06:46.714233 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ffb504a-4c0b-483e-80b3-17c9ffd66385" containerName="mariadb-account-create-update" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.714241 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ffb504a-4c0b-483e-80b3-17c9ffd66385" containerName="mariadb-account-create-update" Dec 11 22:06:46 crc kubenswrapper[4956]: E1211 22:06:46.714256 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="990facdf-1f75-4afc-b70b-464384b2c021" containerName="mariadb-database-create" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.714265 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="990facdf-1f75-4afc-b70b-464384b2c021" containerName="mariadb-database-create" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.714404 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="17780bbd-36df-40ee-a35f-61f848e57120" containerName="keystone-bootstrap" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.714416 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="990facdf-1f75-4afc-b70b-464384b2c021" containerName="mariadb-database-create" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.714439 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ffb504a-4c0b-483e-80b3-17c9ffd66385" containerName="mariadb-account-create-update" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.714952 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-85866bffd9-vxw6k" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.718608 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-scripts" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.719050 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-config-data" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.719169 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.719272 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-keystone-dockercfg-rs76m" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.731266 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-85866bffd9-vxw6k"] Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.888218 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2591ae1a-7dde-47f5-a915-c623aa755a37-credential-keys\") pod \"keystone-85866bffd9-vxw6k\" (UID: \"2591ae1a-7dde-47f5-a915-c623aa755a37\") " pod="swift-kuttl-tests/keystone-85866bffd9-vxw6k" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.888722 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2591ae1a-7dde-47f5-a915-c623aa755a37-config-data\") pod \"keystone-85866bffd9-vxw6k\" (UID: \"2591ae1a-7dde-47f5-a915-c623aa755a37\") " pod="swift-kuttl-tests/keystone-85866bffd9-vxw6k" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.888868 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2591ae1a-7dde-47f5-a915-c623aa755a37-fernet-keys\") pod \"keystone-85866bffd9-vxw6k\" (UID: \"2591ae1a-7dde-47f5-a915-c623aa755a37\") " pod="swift-kuttl-tests/keystone-85866bffd9-vxw6k" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.888925 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzxk8\" (UniqueName: \"kubernetes.io/projected/2591ae1a-7dde-47f5-a915-c623aa755a37-kube-api-access-jzxk8\") pod \"keystone-85866bffd9-vxw6k\" (UID: \"2591ae1a-7dde-47f5-a915-c623aa755a37\") " pod="swift-kuttl-tests/keystone-85866bffd9-vxw6k" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.888958 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2591ae1a-7dde-47f5-a915-c623aa755a37-scripts\") pod \"keystone-85866bffd9-vxw6k\" (UID: \"2591ae1a-7dde-47f5-a915-c623aa755a37\") " pod="swift-kuttl-tests/keystone-85866bffd9-vxw6k" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.990147 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2591ae1a-7dde-47f5-a915-c623aa755a37-config-data\") pod \"keystone-85866bffd9-vxw6k\" (UID: \"2591ae1a-7dde-47f5-a915-c623aa755a37\") " pod="swift-kuttl-tests/keystone-85866bffd9-vxw6k" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.990234 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: 
\"kubernetes.io/secret/2591ae1a-7dde-47f5-a915-c623aa755a37-fernet-keys\") pod \"keystone-85866bffd9-vxw6k\" (UID: \"2591ae1a-7dde-47f5-a915-c623aa755a37\") " pod="swift-kuttl-tests/keystone-85866bffd9-vxw6k" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.990539 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzxk8\" (UniqueName: \"kubernetes.io/projected/2591ae1a-7dde-47f5-a915-c623aa755a37-kube-api-access-jzxk8\") pod \"keystone-85866bffd9-vxw6k\" (UID: \"2591ae1a-7dde-47f5-a915-c623aa755a37\") " pod="swift-kuttl-tests/keystone-85866bffd9-vxw6k" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.990562 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2591ae1a-7dde-47f5-a915-c623aa755a37-scripts\") pod \"keystone-85866bffd9-vxw6k\" (UID: \"2591ae1a-7dde-47f5-a915-c623aa755a37\") " pod="swift-kuttl-tests/keystone-85866bffd9-vxw6k" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.990618 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2591ae1a-7dde-47f5-a915-c623aa755a37-credential-keys\") pod \"keystone-85866bffd9-vxw6k\" (UID: \"2591ae1a-7dde-47f5-a915-c623aa755a37\") " pod="swift-kuttl-tests/keystone-85866bffd9-vxw6k" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.994202 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2591ae1a-7dde-47f5-a915-c623aa755a37-credential-keys\") pod \"keystone-85866bffd9-vxw6k\" (UID: \"2591ae1a-7dde-47f5-a915-c623aa755a37\") " pod="swift-kuttl-tests/keystone-85866bffd9-vxw6k" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.994779 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2591ae1a-7dde-47f5-a915-c623aa755a37-fernet-keys\") pod \"keystone-85866bffd9-vxw6k\" (UID: \"2591ae1a-7dde-47f5-a915-c623aa755a37\") " pod="swift-kuttl-tests/keystone-85866bffd9-vxw6k" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.995144 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2591ae1a-7dde-47f5-a915-c623aa755a37-config-data\") pod \"keystone-85866bffd9-vxw6k\" (UID: \"2591ae1a-7dde-47f5-a915-c623aa755a37\") " pod="swift-kuttl-tests/keystone-85866bffd9-vxw6k" Dec 11 22:06:46 crc kubenswrapper[4956]: I1211 22:06:46.996429 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2591ae1a-7dde-47f5-a915-c623aa755a37-scripts\") pod \"keystone-85866bffd9-vxw6k\" (UID: \"2591ae1a-7dde-47f5-a915-c623aa755a37\") " pod="swift-kuttl-tests/keystone-85866bffd9-vxw6k" Dec 11 22:06:47 crc kubenswrapper[4956]: I1211 22:06:47.009162 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzxk8\" (UniqueName: \"kubernetes.io/projected/2591ae1a-7dde-47f5-a915-c623aa755a37-kube-api-access-jzxk8\") pod \"keystone-85866bffd9-vxw6k\" (UID: \"2591ae1a-7dde-47f5-a915-c623aa755a37\") " pod="swift-kuttl-tests/keystone-85866bffd9-vxw6k" Dec 11 22:06:47 crc kubenswrapper[4956]: I1211 22:06:47.033635 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-85866bffd9-vxw6k" Dec 11 22:06:47 crc kubenswrapper[4956]: I1211 22:06:47.456990 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-85866bffd9-vxw6k"] Dec 11 22:06:47 crc kubenswrapper[4956]: W1211 22:06:47.465675 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2591ae1a_7dde_47f5_a915_c623aa755a37.slice/crio-1b27e509a102722a5bf531e7cd23eeecf5c81c3f6c18059617632c50ce41a7fa WatchSource:0}: Error finding container 1b27e509a102722a5bf531e7cd23eeecf5c81c3f6c18059617632c50ce41a7fa: Status 404 returned error can't find the container with id 1b27e509a102722a5bf531e7cd23eeecf5c81c3f6c18059617632c50ce41a7fa Dec 11 22:06:47 crc kubenswrapper[4956]: I1211 22:06:47.660005 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-85866bffd9-vxw6k" event={"ID":"2591ae1a-7dde-47f5-a915-c623aa755a37","Type":"ContainerStarted","Data":"1b27e509a102722a5bf531e7cd23eeecf5c81c3f6c18059617632c50ce41a7fa"} Dec 11 22:06:48 crc kubenswrapper[4956]: I1211 22:06:48.670223 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-85866bffd9-vxw6k" event={"ID":"2591ae1a-7dde-47f5-a915-c623aa755a37","Type":"ContainerStarted","Data":"21c62966c35e81f2b8b19508557829454f4f0b327577ef15988b01eee2e58fe8"} Dec 11 22:06:48 crc kubenswrapper[4956]: I1211 22:06:48.670404 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/keystone-85866bffd9-vxw6k" Dec 11 22:06:48 crc kubenswrapper[4956]: I1211 22:06:48.688172 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/keystone-85866bffd9-vxw6k" podStartSLOduration=2.688151125 podStartE2EDuration="2.688151125s" podCreationTimestamp="2025-12-11 22:06:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 22:06:48.683118248 +0000 UTC m=+1101.127496438" watchObservedRunningTime="2025-12-11 22:06:48.688151125 +0000 UTC m=+1101.132529495" Dec 11 22:06:51 crc kubenswrapper[4956]: I1211 22:06:51.336079 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/barbican-db-sync-s6kdn"] Dec 11 22:06:51 crc kubenswrapper[4956]: I1211 22:06:51.338059 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbican-db-sync-s6kdn" Dec 11 22:06:51 crc kubenswrapper[4956]: I1211 22:06:51.339912 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"barbican-barbican-dockercfg-bpmww" Dec 11 22:06:51 crc kubenswrapper[4956]: I1211 22:06:51.340705 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"barbican-config-data" Dec 11 22:06:51 crc kubenswrapper[4956]: I1211 22:06:51.350467 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-db-sync-s6kdn"] Dec 11 22:06:51 crc kubenswrapper[4956]: I1211 22:06:51.495108 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klpkn\" (UniqueName: \"kubernetes.io/projected/8f094fcd-e905-4061-8726-a536b25ddbc7-kube-api-access-klpkn\") pod \"barbican-db-sync-s6kdn\" (UID: \"8f094fcd-e905-4061-8726-a536b25ddbc7\") " pod="swift-kuttl-tests/barbican-db-sync-s6kdn" Dec 11 22:06:51 crc kubenswrapper[4956]: I1211 22:06:51.495186 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8f094fcd-e905-4061-8726-a536b25ddbc7-db-sync-config-data\") pod \"barbican-db-sync-s6kdn\" (UID: \"8f094fcd-e905-4061-8726-a536b25ddbc7\") " pod="swift-kuttl-tests/barbican-db-sync-s6kdn" Dec 11 22:06:51 crc kubenswrapper[4956]: I1211 22:06:51.596349 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klpkn\" (UniqueName: \"kubernetes.io/projected/8f094fcd-e905-4061-8726-a536b25ddbc7-kube-api-access-klpkn\") pod \"barbican-db-sync-s6kdn\" (UID: \"8f094fcd-e905-4061-8726-a536b25ddbc7\") " pod="swift-kuttl-tests/barbican-db-sync-s6kdn" Dec 11 22:06:51 crc kubenswrapper[4956]: I1211 22:06:51.596398 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8f094fcd-e905-4061-8726-a536b25ddbc7-db-sync-config-data\") pod \"barbican-db-sync-s6kdn\" (UID: \"8f094fcd-e905-4061-8726-a536b25ddbc7\") " pod="swift-kuttl-tests/barbican-db-sync-s6kdn" Dec 11 22:06:51 crc kubenswrapper[4956]: I1211 22:06:51.611631 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8f094fcd-e905-4061-8726-a536b25ddbc7-db-sync-config-data\") pod \"barbican-db-sync-s6kdn\" (UID: \"8f094fcd-e905-4061-8726-a536b25ddbc7\") " pod="swift-kuttl-tests/barbican-db-sync-s6kdn" Dec 11 22:06:51 crc kubenswrapper[4956]: I1211 22:06:51.611756 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klpkn\" (UniqueName: \"kubernetes.io/projected/8f094fcd-e905-4061-8726-a536b25ddbc7-kube-api-access-klpkn\") pod \"barbican-db-sync-s6kdn\" (UID: \"8f094fcd-e905-4061-8726-a536b25ddbc7\") " pod="swift-kuttl-tests/barbican-db-sync-s6kdn" Dec 11 22:06:51 crc kubenswrapper[4956]: I1211 22:06:51.661812 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbican-db-sync-s6kdn" Dec 11 22:06:52 crc kubenswrapper[4956]: I1211 22:06:52.163422 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-db-sync-s6kdn"] Dec 11 22:06:52 crc kubenswrapper[4956]: W1211 22:06:52.176541 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8f094fcd_e905_4061_8726_a536b25ddbc7.slice/crio-7144fe1dd135c95824b299fd4895b64f7afd3abd21e424369eecde5ae713a848 WatchSource:0}: Error finding container 7144fe1dd135c95824b299fd4895b64f7afd3abd21e424369eecde5ae713a848: Status 404 returned error can't find the container with id 7144fe1dd135c95824b299fd4895b64f7afd3abd21e424369eecde5ae713a848 Dec 11 22:06:52 crc kubenswrapper[4956]: I1211 22:06:52.696119 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-db-sync-s6kdn" event={"ID":"8f094fcd-e905-4061-8726-a536b25ddbc7","Type":"ContainerStarted","Data":"7144fe1dd135c95824b299fd4895b64f7afd3abd21e424369eecde5ae713a848"} Dec 11 22:06:54 crc kubenswrapper[4956]: I1211 22:06:54.468431 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-index-gg48q"] Dec 11 22:06:54 crc kubenswrapper[4956]: I1211 22:06:54.469222 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-index-gg48q" Dec 11 22:06:54 crc kubenswrapper[4956]: I1211 22:06:54.472064 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-index-dockercfg-2r64b" Dec 11 22:06:54 crc kubenswrapper[4956]: I1211 22:06:54.478292 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-index-gg48q"] Dec 11 22:06:54 crc kubenswrapper[4956]: I1211 22:06:54.545845 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcnmz\" (UniqueName: \"kubernetes.io/projected/e1af6044-d7ab-46d3-8bc9-cf8a341be1d6-kube-api-access-zcnmz\") pod \"swift-operator-index-gg48q\" (UID: \"e1af6044-d7ab-46d3-8bc9-cf8a341be1d6\") " pod="openstack-operators/swift-operator-index-gg48q" Dec 11 22:06:54 crc kubenswrapper[4956]: I1211 22:06:54.648132 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcnmz\" (UniqueName: \"kubernetes.io/projected/e1af6044-d7ab-46d3-8bc9-cf8a341be1d6-kube-api-access-zcnmz\") pod \"swift-operator-index-gg48q\" (UID: \"e1af6044-d7ab-46d3-8bc9-cf8a341be1d6\") " pod="openstack-operators/swift-operator-index-gg48q" Dec 11 22:06:54 crc kubenswrapper[4956]: I1211 22:06:54.668131 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcnmz\" (UniqueName: \"kubernetes.io/projected/e1af6044-d7ab-46d3-8bc9-cf8a341be1d6-kube-api-access-zcnmz\") pod \"swift-operator-index-gg48q\" (UID: \"e1af6044-d7ab-46d3-8bc9-cf8a341be1d6\") " pod="openstack-operators/swift-operator-index-gg48q" Dec 11 22:06:54 crc kubenswrapper[4956]: I1211 22:06:54.795369 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-index-gg48q" Dec 11 22:06:55 crc kubenswrapper[4956]: I1211 22:06:55.264880 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-index-gg48q"] Dec 11 22:06:55 crc kubenswrapper[4956]: W1211 22:06:55.281944 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode1af6044_d7ab_46d3_8bc9_cf8a341be1d6.slice/crio-962539944f39575309b9effd008d5120d3cd804e93aa37c61affcde062c96189 WatchSource:0}: Error finding container 962539944f39575309b9effd008d5120d3cd804e93aa37c61affcde062c96189: Status 404 returned error can't find the container with id 962539944f39575309b9effd008d5120d3cd804e93aa37c61affcde062c96189 Dec 11 22:06:55 crc kubenswrapper[4956]: I1211 22:06:55.718739 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-gg48q" event={"ID":"e1af6044-d7ab-46d3-8bc9-cf8a341be1d6","Type":"ContainerStarted","Data":"962539944f39575309b9effd008d5120d3cd804e93aa37c61affcde062c96189"} Dec 11 22:06:56 crc kubenswrapper[4956]: I1211 22:06:56.729525 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-gg48q" event={"ID":"e1af6044-d7ab-46d3-8bc9-cf8a341be1d6","Type":"ContainerStarted","Data":"d5833cded3ab6d89076285b9c3d0003ea58d6a44dede9b33e5e9c03c3ebee17c"} Dec 11 22:06:56 crc kubenswrapper[4956]: I1211 22:06:56.744343 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-index-gg48q" podStartSLOduration=2.298396411 podStartE2EDuration="2.744324439s" podCreationTimestamp="2025-12-11 22:06:54 +0000 UTC" firstStartedPulling="2025-12-11 22:06:55.283338336 +0000 UTC m=+1107.727716486" lastFinishedPulling="2025-12-11 22:06:55.729266374 +0000 UTC m=+1108.173644514" observedRunningTime="2025-12-11 22:06:56.743080725 +0000 UTC m=+1109.187458875" watchObservedRunningTime="2025-12-11 22:06:56.744324439 +0000 UTC m=+1109.188702589" Dec 11 22:06:59 crc kubenswrapper[4956]: I1211 22:06:59.752575 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-db-sync-s6kdn" event={"ID":"8f094fcd-e905-4061-8726-a536b25ddbc7","Type":"ContainerStarted","Data":"9d7a56d817507731d9f5b96350bc192ca763c97465cf929d74410c27961c2387"} Dec 11 22:06:59 crc kubenswrapper[4956]: I1211 22:06:59.770683 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/barbican-db-sync-s6kdn" podStartSLOduration=1.602660644 podStartE2EDuration="8.770663211s" podCreationTimestamp="2025-12-11 22:06:51 +0000 UTC" firstStartedPulling="2025-12-11 22:06:52.178839565 +0000 UTC m=+1104.623217715" lastFinishedPulling="2025-12-11 22:06:59.346842132 +0000 UTC m=+1111.791220282" observedRunningTime="2025-12-11 22:06:59.767945858 +0000 UTC m=+1112.212324018" watchObservedRunningTime="2025-12-11 22:06:59.770663211 +0000 UTC m=+1112.215041361" Dec 11 22:07:03 crc kubenswrapper[4956]: I1211 22:07:03.798650 4956 generic.go:334] "Generic (PLEG): container finished" podID="8f094fcd-e905-4061-8726-a536b25ddbc7" containerID="9d7a56d817507731d9f5b96350bc192ca763c97465cf929d74410c27961c2387" exitCode=0 Dec 11 22:07:03 crc kubenswrapper[4956]: I1211 22:07:03.798835 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-db-sync-s6kdn" 
event={"ID":"8f094fcd-e905-4061-8726-a536b25ddbc7","Type":"ContainerDied","Data":"9d7a56d817507731d9f5b96350bc192ca763c97465cf929d74410c27961c2387"} Dec 11 22:07:04 crc kubenswrapper[4956]: I1211 22:07:04.795552 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-index-gg48q" Dec 11 22:07:04 crc kubenswrapper[4956]: I1211 22:07:04.795695 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/swift-operator-index-gg48q" Dec 11 22:07:04 crc kubenswrapper[4956]: I1211 22:07:04.928321 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/swift-operator-index-gg48q" Dec 11 22:07:05 crc kubenswrapper[4956]: I1211 22:07:05.186269 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-db-sync-s6kdn" Dec 11 22:07:05 crc kubenswrapper[4956]: I1211 22:07:05.323123 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8f094fcd-e905-4061-8726-a536b25ddbc7-db-sync-config-data\") pod \"8f094fcd-e905-4061-8726-a536b25ddbc7\" (UID: \"8f094fcd-e905-4061-8726-a536b25ddbc7\") " Dec 11 22:07:05 crc kubenswrapper[4956]: I1211 22:07:05.323284 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-klpkn\" (UniqueName: \"kubernetes.io/projected/8f094fcd-e905-4061-8726-a536b25ddbc7-kube-api-access-klpkn\") pod \"8f094fcd-e905-4061-8726-a536b25ddbc7\" (UID: \"8f094fcd-e905-4061-8726-a536b25ddbc7\") " Dec 11 22:07:05 crc kubenswrapper[4956]: I1211 22:07:05.328950 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f094fcd-e905-4061-8726-a536b25ddbc7-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "8f094fcd-e905-4061-8726-a536b25ddbc7" (UID: "8f094fcd-e905-4061-8726-a536b25ddbc7"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 22:07:05 crc kubenswrapper[4956]: I1211 22:07:05.329003 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f094fcd-e905-4061-8726-a536b25ddbc7-kube-api-access-klpkn" (OuterVolumeSpecName: "kube-api-access-klpkn") pod "8f094fcd-e905-4061-8726-a536b25ddbc7" (UID: "8f094fcd-e905-4061-8726-a536b25ddbc7"). InnerVolumeSpecName "kube-api-access-klpkn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:07:05 crc kubenswrapper[4956]: I1211 22:07:05.425482 4956 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8f094fcd-e905-4061-8726-a536b25ddbc7-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 22:07:05 crc kubenswrapper[4956]: I1211 22:07:05.425518 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-klpkn\" (UniqueName: \"kubernetes.io/projected/8f094fcd-e905-4061-8726-a536b25ddbc7-kube-api-access-klpkn\") on node \"crc\" DevicePath \"\"" Dec 11 22:07:05 crc kubenswrapper[4956]: I1211 22:07:05.814536 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbican-db-sync-s6kdn" Dec 11 22:07:05 crc kubenswrapper[4956]: I1211 22:07:05.814525 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-db-sync-s6kdn" event={"ID":"8f094fcd-e905-4061-8726-a536b25ddbc7","Type":"ContainerDied","Data":"7144fe1dd135c95824b299fd4895b64f7afd3abd21e424369eecde5ae713a848"} Dec 11 22:07:05 crc kubenswrapper[4956]: I1211 22:07:05.815045 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7144fe1dd135c95824b299fd4895b64f7afd3abd21e424369eecde5ae713a848" Dec 11 22:07:05 crc kubenswrapper[4956]: I1211 22:07:05.842333 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-index-gg48q" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.070883 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/barbican-worker-5549fbbc4f-bvswp"] Dec 11 22:07:06 crc kubenswrapper[4956]: E1211 22:07:06.071128 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f094fcd-e905-4061-8726-a536b25ddbc7" containerName="barbican-db-sync" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.071140 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f094fcd-e905-4061-8726-a536b25ddbc7" containerName="barbican-db-sync" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.071238 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f094fcd-e905-4061-8726-a536b25ddbc7" containerName="barbican-db-sync" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.071884 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-worker-5549fbbc4f-bvswp" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.074293 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"barbican-worker-config-data" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.074746 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"barbican-barbican-dockercfg-bpmww" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.074897 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"barbican-config-data" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.084494 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-worker-5549fbbc4f-bvswp"] Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.090452 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/barbican-keystone-listener-6f89b79686-srzks"] Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.092220 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbican-keystone-listener-6f89b79686-srzks" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.097081 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"barbican-keystone-listener-config-data" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.101392 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-keystone-listener-6f89b79686-srzks"] Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.187188 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f8e74f81-6253-4f42-89ba-4023bdd47d65-config-data-custom\") pod \"barbican-worker-5549fbbc4f-bvswp\" (UID: \"f8e74f81-6253-4f42-89ba-4023bdd47d65\") " pod="swift-kuttl-tests/barbican-worker-5549fbbc4f-bvswp" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.187238 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5tdk\" (UniqueName: \"kubernetes.io/projected/f8e74f81-6253-4f42-89ba-4023bdd47d65-kube-api-access-d5tdk\") pod \"barbican-worker-5549fbbc4f-bvswp\" (UID: \"f8e74f81-6253-4f42-89ba-4023bdd47d65\") " pod="swift-kuttl-tests/barbican-worker-5549fbbc4f-bvswp" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.187258 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f8e74f81-6253-4f42-89ba-4023bdd47d65-logs\") pod \"barbican-worker-5549fbbc4f-bvswp\" (UID: \"f8e74f81-6253-4f42-89ba-4023bdd47d65\") " pod="swift-kuttl-tests/barbican-worker-5549fbbc4f-bvswp" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.187295 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8e74f81-6253-4f42-89ba-4023bdd47d65-config-data\") pod \"barbican-worker-5549fbbc4f-bvswp\" (UID: \"f8e74f81-6253-4f42-89ba-4023bdd47d65\") " pod="swift-kuttl-tests/barbican-worker-5549fbbc4f-bvswp" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.187315 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/496ff541-0e71-4497-89ac-e860c0c5300a-logs\") pod \"barbican-keystone-listener-6f89b79686-srzks\" (UID: \"496ff541-0e71-4497-89ac-e860c0c5300a\") " pod="swift-kuttl-tests/barbican-keystone-listener-6f89b79686-srzks" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.187375 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsz7g\" (UniqueName: \"kubernetes.io/projected/496ff541-0e71-4497-89ac-e860c0c5300a-kube-api-access-nsz7g\") pod \"barbican-keystone-listener-6f89b79686-srzks\" (UID: \"496ff541-0e71-4497-89ac-e860c0c5300a\") " pod="swift-kuttl-tests/barbican-keystone-listener-6f89b79686-srzks" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.187429 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/496ff541-0e71-4497-89ac-e860c0c5300a-config-data\") pod \"barbican-keystone-listener-6f89b79686-srzks\" (UID: \"496ff541-0e71-4497-89ac-e860c0c5300a\") " pod="swift-kuttl-tests/barbican-keystone-listener-6f89b79686-srzks" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.187452 4956 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/496ff541-0e71-4497-89ac-e860c0c5300a-config-data-custom\") pod \"barbican-keystone-listener-6f89b79686-srzks\" (UID: \"496ff541-0e71-4497-89ac-e860c0c5300a\") " pod="swift-kuttl-tests/barbican-keystone-listener-6f89b79686-srzks" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.262862 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/barbican-api-84dc9b8b44-tt7zq"] Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.264224 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-api-84dc9b8b44-tt7zq" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.266834 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"barbican-api-config-data" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.276528 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-api-84dc9b8b44-tt7zq"] Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.293003 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/496ff541-0e71-4497-89ac-e860c0c5300a-config-data\") pod \"barbican-keystone-listener-6f89b79686-srzks\" (UID: \"496ff541-0e71-4497-89ac-e860c0c5300a\") " pod="swift-kuttl-tests/barbican-keystone-listener-6f89b79686-srzks" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.293046 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/496ff541-0e71-4497-89ac-e860c0c5300a-config-data-custom\") pod \"barbican-keystone-listener-6f89b79686-srzks\" (UID: \"496ff541-0e71-4497-89ac-e860c0c5300a\") " pod="swift-kuttl-tests/barbican-keystone-listener-6f89b79686-srzks" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.293068 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f8e74f81-6253-4f42-89ba-4023bdd47d65-config-data-custom\") pod \"barbican-worker-5549fbbc4f-bvswp\" (UID: \"f8e74f81-6253-4f42-89ba-4023bdd47d65\") " pod="swift-kuttl-tests/barbican-worker-5549fbbc4f-bvswp" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.293091 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5tdk\" (UniqueName: \"kubernetes.io/projected/f8e74f81-6253-4f42-89ba-4023bdd47d65-kube-api-access-d5tdk\") pod \"barbican-worker-5549fbbc4f-bvswp\" (UID: \"f8e74f81-6253-4f42-89ba-4023bdd47d65\") " pod="swift-kuttl-tests/barbican-worker-5549fbbc4f-bvswp" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.293123 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f8e74f81-6253-4f42-89ba-4023bdd47d65-logs\") pod \"barbican-worker-5549fbbc4f-bvswp\" (UID: \"f8e74f81-6253-4f42-89ba-4023bdd47d65\") " pod="swift-kuttl-tests/barbican-worker-5549fbbc4f-bvswp" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.293157 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8e74f81-6253-4f42-89ba-4023bdd47d65-config-data\") pod \"barbican-worker-5549fbbc4f-bvswp\" (UID: \"f8e74f81-6253-4f42-89ba-4023bdd47d65\") " 
pod="swift-kuttl-tests/barbican-worker-5549fbbc4f-bvswp" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.293175 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/496ff541-0e71-4497-89ac-e860c0c5300a-logs\") pod \"barbican-keystone-listener-6f89b79686-srzks\" (UID: \"496ff541-0e71-4497-89ac-e860c0c5300a\") " pod="swift-kuttl-tests/barbican-keystone-listener-6f89b79686-srzks" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.293201 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsz7g\" (UniqueName: \"kubernetes.io/projected/496ff541-0e71-4497-89ac-e860c0c5300a-kube-api-access-nsz7g\") pod \"barbican-keystone-listener-6f89b79686-srzks\" (UID: \"496ff541-0e71-4497-89ac-e860c0c5300a\") " pod="swift-kuttl-tests/barbican-keystone-listener-6f89b79686-srzks" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.294000 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/496ff541-0e71-4497-89ac-e860c0c5300a-logs\") pod \"barbican-keystone-listener-6f89b79686-srzks\" (UID: \"496ff541-0e71-4497-89ac-e860c0c5300a\") " pod="swift-kuttl-tests/barbican-keystone-listener-6f89b79686-srzks" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.294032 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f8e74f81-6253-4f42-89ba-4023bdd47d65-logs\") pod \"barbican-worker-5549fbbc4f-bvswp\" (UID: \"f8e74f81-6253-4f42-89ba-4023bdd47d65\") " pod="swift-kuttl-tests/barbican-worker-5549fbbc4f-bvswp" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.296680 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/496ff541-0e71-4497-89ac-e860c0c5300a-config-data-custom\") pod \"barbican-keystone-listener-6f89b79686-srzks\" (UID: \"496ff541-0e71-4497-89ac-e860c0c5300a\") " pod="swift-kuttl-tests/barbican-keystone-listener-6f89b79686-srzks" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.296854 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/496ff541-0e71-4497-89ac-e860c0c5300a-config-data\") pod \"barbican-keystone-listener-6f89b79686-srzks\" (UID: \"496ff541-0e71-4497-89ac-e860c0c5300a\") " pod="swift-kuttl-tests/barbican-keystone-listener-6f89b79686-srzks" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.299914 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8e74f81-6253-4f42-89ba-4023bdd47d65-config-data\") pod \"barbican-worker-5549fbbc4f-bvswp\" (UID: \"f8e74f81-6253-4f42-89ba-4023bdd47d65\") " pod="swift-kuttl-tests/barbican-worker-5549fbbc4f-bvswp" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.308994 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5tdk\" (UniqueName: \"kubernetes.io/projected/f8e74f81-6253-4f42-89ba-4023bdd47d65-kube-api-access-d5tdk\") pod \"barbican-worker-5549fbbc4f-bvswp\" (UID: \"f8e74f81-6253-4f42-89ba-4023bdd47d65\") " pod="swift-kuttl-tests/barbican-worker-5549fbbc4f-bvswp" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.310461 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsz7g\" (UniqueName: 
\"kubernetes.io/projected/496ff541-0e71-4497-89ac-e860c0c5300a-kube-api-access-nsz7g\") pod \"barbican-keystone-listener-6f89b79686-srzks\" (UID: \"496ff541-0e71-4497-89ac-e860c0c5300a\") " pod="swift-kuttl-tests/barbican-keystone-listener-6f89b79686-srzks" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.315433 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f8e74f81-6253-4f42-89ba-4023bdd47d65-config-data-custom\") pod \"barbican-worker-5549fbbc4f-bvswp\" (UID: \"f8e74f81-6253-4f42-89ba-4023bdd47d65\") " pod="swift-kuttl-tests/barbican-worker-5549fbbc4f-bvswp" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.394306 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3c282539-1074-4e74-bea2-dfe83a575a5c-config-data-custom\") pod \"barbican-api-84dc9b8b44-tt7zq\" (UID: \"3c282539-1074-4e74-bea2-dfe83a575a5c\") " pod="swift-kuttl-tests/barbican-api-84dc9b8b44-tt7zq" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.394355 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqpcq\" (UniqueName: \"kubernetes.io/projected/3c282539-1074-4e74-bea2-dfe83a575a5c-kube-api-access-dqpcq\") pod \"barbican-api-84dc9b8b44-tt7zq\" (UID: \"3c282539-1074-4e74-bea2-dfe83a575a5c\") " pod="swift-kuttl-tests/barbican-api-84dc9b8b44-tt7zq" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.394533 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c282539-1074-4e74-bea2-dfe83a575a5c-config-data\") pod \"barbican-api-84dc9b8b44-tt7zq\" (UID: \"3c282539-1074-4e74-bea2-dfe83a575a5c\") " pod="swift-kuttl-tests/barbican-api-84dc9b8b44-tt7zq" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.394581 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c282539-1074-4e74-bea2-dfe83a575a5c-logs\") pod \"barbican-api-84dc9b8b44-tt7zq\" (UID: \"3c282539-1074-4e74-bea2-dfe83a575a5c\") " pod="swift-kuttl-tests/barbican-api-84dc9b8b44-tt7zq" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.406955 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-worker-5549fbbc4f-bvswp" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.416864 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbican-keystone-listener-6f89b79686-srzks" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.497617 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c282539-1074-4e74-bea2-dfe83a575a5c-config-data\") pod \"barbican-api-84dc9b8b44-tt7zq\" (UID: \"3c282539-1074-4e74-bea2-dfe83a575a5c\") " pod="swift-kuttl-tests/barbican-api-84dc9b8b44-tt7zq" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.497676 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c282539-1074-4e74-bea2-dfe83a575a5c-logs\") pod \"barbican-api-84dc9b8b44-tt7zq\" (UID: \"3c282539-1074-4e74-bea2-dfe83a575a5c\") " pod="swift-kuttl-tests/barbican-api-84dc9b8b44-tt7zq" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.497706 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3c282539-1074-4e74-bea2-dfe83a575a5c-config-data-custom\") pod \"barbican-api-84dc9b8b44-tt7zq\" (UID: \"3c282539-1074-4e74-bea2-dfe83a575a5c\") " pod="swift-kuttl-tests/barbican-api-84dc9b8b44-tt7zq" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.497735 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqpcq\" (UniqueName: \"kubernetes.io/projected/3c282539-1074-4e74-bea2-dfe83a575a5c-kube-api-access-dqpcq\") pod \"barbican-api-84dc9b8b44-tt7zq\" (UID: \"3c282539-1074-4e74-bea2-dfe83a575a5c\") " pod="swift-kuttl-tests/barbican-api-84dc9b8b44-tt7zq" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.498528 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c282539-1074-4e74-bea2-dfe83a575a5c-logs\") pod \"barbican-api-84dc9b8b44-tt7zq\" (UID: \"3c282539-1074-4e74-bea2-dfe83a575a5c\") " pod="swift-kuttl-tests/barbican-api-84dc9b8b44-tt7zq" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.502811 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3c282539-1074-4e74-bea2-dfe83a575a5c-config-data-custom\") pod \"barbican-api-84dc9b8b44-tt7zq\" (UID: \"3c282539-1074-4e74-bea2-dfe83a575a5c\") " pod="swift-kuttl-tests/barbican-api-84dc9b8b44-tt7zq" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.513824 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqpcq\" (UniqueName: \"kubernetes.io/projected/3c282539-1074-4e74-bea2-dfe83a575a5c-kube-api-access-dqpcq\") pod \"barbican-api-84dc9b8b44-tt7zq\" (UID: \"3c282539-1074-4e74-bea2-dfe83a575a5c\") " pod="swift-kuttl-tests/barbican-api-84dc9b8b44-tt7zq" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.515349 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c282539-1074-4e74-bea2-dfe83a575a5c-config-data\") pod \"barbican-api-84dc9b8b44-tt7zq\" (UID: \"3c282539-1074-4e74-bea2-dfe83a575a5c\") " pod="swift-kuttl-tests/barbican-api-84dc9b8b44-tt7zq" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.579251 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbican-api-84dc9b8b44-tt7zq" Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.933179 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-worker-5549fbbc4f-bvswp"] Dec 11 22:07:06 crc kubenswrapper[4956]: W1211 22:07:06.936288 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf8e74f81_6253_4f42_89ba_4023bdd47d65.slice/crio-39f10d4e93e7fad16b3ccf145abe0f1a0176d628ae6cd58c8b5081abd2e3102e WatchSource:0}: Error finding container 39f10d4e93e7fad16b3ccf145abe0f1a0176d628ae6cd58c8b5081abd2e3102e: Status 404 returned error can't find the container with id 39f10d4e93e7fad16b3ccf145abe0f1a0176d628ae6cd58c8b5081abd2e3102e Dec 11 22:07:06 crc kubenswrapper[4956]: I1211 22:07:06.974999 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-api-84dc9b8b44-tt7zq"] Dec 11 22:07:06 crc kubenswrapper[4956]: W1211 22:07:06.978823 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c282539_1074_4e74_bea2_dfe83a575a5c.slice/crio-5404365199a3dc29b50bad840d1f21815c080ae94c0073b065e5e114ca02fa39 WatchSource:0}: Error finding container 5404365199a3dc29b50bad840d1f21815c080ae94c0073b065e5e114ca02fa39: Status 404 returned error can't find the container with id 5404365199a3dc29b50bad840d1f21815c080ae94c0073b065e5e114ca02fa39 Dec 11 22:07:07 crc kubenswrapper[4956]: I1211 22:07:07.056820 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-keystone-listener-6f89b79686-srzks"] Dec 11 22:07:07 crc kubenswrapper[4956]: W1211 22:07:07.066288 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod496ff541_0e71_4497_89ac_e860c0c5300a.slice/crio-14cca32e5e413fd911252c699380875e1a1097cf9b52e5e4b088be006e6f721e WatchSource:0}: Error finding container 14cca32e5e413fd911252c699380875e1a1097cf9b52e5e4b088be006e6f721e: Status 404 returned error can't find the container with id 14cca32e5e413fd911252c699380875e1a1097cf9b52e5e4b088be006e6f721e Dec 11 22:07:07 crc kubenswrapper[4956]: I1211 22:07:07.831498 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-api-84dc9b8b44-tt7zq" event={"ID":"3c282539-1074-4e74-bea2-dfe83a575a5c","Type":"ContainerStarted","Data":"5dde37fd9365d93eb1cf7808e5aeda30aa3b2c7b96a07fe5c3a27a5ee475bd25"} Dec 11 22:07:07 crc kubenswrapper[4956]: I1211 22:07:07.831849 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-api-84dc9b8b44-tt7zq" event={"ID":"3c282539-1074-4e74-bea2-dfe83a575a5c","Type":"ContainerStarted","Data":"5404365199a3dc29b50bad840d1f21815c080ae94c0073b065e5e114ca02fa39"} Dec 11 22:07:07 crc kubenswrapper[4956]: I1211 22:07:07.832621 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-keystone-listener-6f89b79686-srzks" event={"ID":"496ff541-0e71-4497-89ac-e860c0c5300a","Type":"ContainerStarted","Data":"14cca32e5e413fd911252c699380875e1a1097cf9b52e5e4b088be006e6f721e"} Dec 11 22:07:07 crc kubenswrapper[4956]: I1211 22:07:07.833881 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-worker-5549fbbc4f-bvswp" event={"ID":"f8e74f81-6253-4f42-89ba-4023bdd47d65","Type":"ContainerStarted","Data":"39f10d4e93e7fad16b3ccf145abe0f1a0176d628ae6cd58c8b5081abd2e3102e"} Dec 
11 22:07:08 crc kubenswrapper[4956]: I1211 22:07:08.843960 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-api-84dc9b8b44-tt7zq" event={"ID":"3c282539-1074-4e74-bea2-dfe83a575a5c","Type":"ContainerStarted","Data":"18bef549117c7eef2cc387d560ec7b8b03b50246b227e2645d9fd86473dab47c"} Dec 11 22:07:08 crc kubenswrapper[4956]: I1211 22:07:08.844312 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/barbican-api-84dc9b8b44-tt7zq" Dec 11 22:07:08 crc kubenswrapper[4956]: I1211 22:07:08.844327 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/barbican-api-84dc9b8b44-tt7zq" Dec 11 22:07:08 crc kubenswrapper[4956]: I1211 22:07:08.864556 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/barbican-api-84dc9b8b44-tt7zq" podStartSLOduration=2.864532482 podStartE2EDuration="2.864532482s" podCreationTimestamp="2025-12-11 22:07:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 22:07:08.860410671 +0000 UTC m=+1121.304788851" watchObservedRunningTime="2025-12-11 22:07:08.864532482 +0000 UTC m=+1121.308910632" Dec 11 22:07:10 crc kubenswrapper[4956]: I1211 22:07:10.867961 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-keystone-listener-6f89b79686-srzks" event={"ID":"496ff541-0e71-4497-89ac-e860c0c5300a","Type":"ContainerStarted","Data":"9c4b8d5db4496e390c76d9734c39cf36b56237002c282e7a7b164a1501ba8e0c"} Dec 11 22:07:10 crc kubenswrapper[4956]: I1211 22:07:10.868629 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-keystone-listener-6f89b79686-srzks" event={"ID":"496ff541-0e71-4497-89ac-e860c0c5300a","Type":"ContainerStarted","Data":"bc2e879d98c6abc557657088927de6a61f3053e740367403f86180988797d3b5"} Dec 11 22:07:10 crc kubenswrapper[4956]: I1211 22:07:10.870004 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-worker-5549fbbc4f-bvswp" event={"ID":"f8e74f81-6253-4f42-89ba-4023bdd47d65","Type":"ContainerStarted","Data":"8a166939dd5ac52cb96a4d2807b12a31a598cafa647a74944677b68ff7eca831"} Dec 11 22:07:10 crc kubenswrapper[4956]: I1211 22:07:10.870041 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-worker-5549fbbc4f-bvswp" event={"ID":"f8e74f81-6253-4f42-89ba-4023bdd47d65","Type":"ContainerStarted","Data":"255446cda28167d915a8bcf9e92066eb6b6c1b8419b5d47caa4557462c6b1610"} Dec 11 22:07:10 crc kubenswrapper[4956]: I1211 22:07:10.972873 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/barbican-keystone-listener-6f89b79686-srzks" podStartSLOduration=2.329275804 podStartE2EDuration="4.972851252s" podCreationTimestamp="2025-12-11 22:07:06 +0000 UTC" firstStartedPulling="2025-12-11 22:07:07.068884579 +0000 UTC m=+1119.513262729" lastFinishedPulling="2025-12-11 22:07:09.712460027 +0000 UTC m=+1122.156838177" observedRunningTime="2025-12-11 22:07:10.88537791 +0000 UTC m=+1123.329756100" watchObservedRunningTime="2025-12-11 22:07:10.972851252 +0000 UTC m=+1123.417229402" Dec 11 22:07:11 crc kubenswrapper[4956]: I1211 22:07:11.001876 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/barbican-worker-5549fbbc4f-bvswp" podStartSLOduration=2.22291016 podStartE2EDuration="5.001830016s" podCreationTimestamp="2025-12-11 22:07:06 
+0000 UTC" firstStartedPulling="2025-12-11 22:07:06.938396102 +0000 UTC m=+1119.382774252" lastFinishedPulling="2025-12-11 22:07:09.717315958 +0000 UTC m=+1122.161694108" observedRunningTime="2025-12-11 22:07:10.997489909 +0000 UTC m=+1123.441868079" watchObservedRunningTime="2025-12-11 22:07:11.001830016 +0000 UTC m=+1123.446208176" Dec 11 22:07:11 crc kubenswrapper[4956]: I1211 22:07:11.300155 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82"] Dec 11 22:07:11 crc kubenswrapper[4956]: I1211 22:07:11.301649 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82" Dec 11 22:07:11 crc kubenswrapper[4956]: I1211 22:07:11.304343 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-7p6h2" Dec 11 22:07:11 crc kubenswrapper[4956]: I1211 22:07:11.316741 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82"] Dec 11 22:07:11 crc kubenswrapper[4956]: I1211 22:07:11.485101 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dfb081fe-bb0f-4b1b-ad99-12a07bf52c29-bundle\") pod \"bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82\" (UID: \"dfb081fe-bb0f-4b1b-ad99-12a07bf52c29\") " pod="openstack-operators/bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82" Dec 11 22:07:11 crc kubenswrapper[4956]: I1211 22:07:11.485251 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dfb081fe-bb0f-4b1b-ad99-12a07bf52c29-util\") pod \"bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82\" (UID: \"dfb081fe-bb0f-4b1b-ad99-12a07bf52c29\") " pod="openstack-operators/bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82" Dec 11 22:07:11 crc kubenswrapper[4956]: I1211 22:07:11.485286 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9pps\" (UniqueName: \"kubernetes.io/projected/dfb081fe-bb0f-4b1b-ad99-12a07bf52c29-kube-api-access-m9pps\") pod \"bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82\" (UID: \"dfb081fe-bb0f-4b1b-ad99-12a07bf52c29\") " pod="openstack-operators/bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82" Dec 11 22:07:11 crc kubenswrapper[4956]: I1211 22:07:11.586931 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9pps\" (UniqueName: \"kubernetes.io/projected/dfb081fe-bb0f-4b1b-ad99-12a07bf52c29-kube-api-access-m9pps\") pod \"bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82\" (UID: \"dfb081fe-bb0f-4b1b-ad99-12a07bf52c29\") " pod="openstack-operators/bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82" Dec 11 22:07:11 crc kubenswrapper[4956]: I1211 22:07:11.587273 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dfb081fe-bb0f-4b1b-ad99-12a07bf52c29-bundle\") pod \"bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82\" (UID: \"dfb081fe-bb0f-4b1b-ad99-12a07bf52c29\") " pod="openstack-operators/bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82" Dec 11 22:07:11 
crc kubenswrapper[4956]: I1211 22:07:11.587437 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dfb081fe-bb0f-4b1b-ad99-12a07bf52c29-util\") pod \"bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82\" (UID: \"dfb081fe-bb0f-4b1b-ad99-12a07bf52c29\") " pod="openstack-operators/bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82" Dec 11 22:07:11 crc kubenswrapper[4956]: I1211 22:07:11.587947 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dfb081fe-bb0f-4b1b-ad99-12a07bf52c29-util\") pod \"bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82\" (UID: \"dfb081fe-bb0f-4b1b-ad99-12a07bf52c29\") " pod="openstack-operators/bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82" Dec 11 22:07:11 crc kubenswrapper[4956]: I1211 22:07:11.588126 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dfb081fe-bb0f-4b1b-ad99-12a07bf52c29-bundle\") pod \"bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82\" (UID: \"dfb081fe-bb0f-4b1b-ad99-12a07bf52c29\") " pod="openstack-operators/bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82" Dec 11 22:07:11 crc kubenswrapper[4956]: I1211 22:07:11.605796 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9pps\" (UniqueName: \"kubernetes.io/projected/dfb081fe-bb0f-4b1b-ad99-12a07bf52c29-kube-api-access-m9pps\") pod \"bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82\" (UID: \"dfb081fe-bb0f-4b1b-ad99-12a07bf52c29\") " pod="openstack-operators/bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82" Dec 11 22:07:11 crc kubenswrapper[4956]: I1211 22:07:11.617471 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82" Dec 11 22:07:11 crc kubenswrapper[4956]: I1211 22:07:11.905453 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82"] Dec 11 22:07:12 crc kubenswrapper[4956]: I1211 22:07:12.889291 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82" event={"ID":"dfb081fe-bb0f-4b1b-ad99-12a07bf52c29","Type":"ContainerDied","Data":"bed4a84eecaa36f78625bc487400acef0fecb672037f6b1f45624a76feaba9a1"} Dec 11 22:07:12 crc kubenswrapper[4956]: I1211 22:07:12.889290 4956 generic.go:334] "Generic (PLEG): container finished" podID="dfb081fe-bb0f-4b1b-ad99-12a07bf52c29" containerID="bed4a84eecaa36f78625bc487400acef0fecb672037f6b1f45624a76feaba9a1" exitCode=0 Dec 11 22:07:12 crc kubenswrapper[4956]: I1211 22:07:12.889911 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82" event={"ID":"dfb081fe-bb0f-4b1b-ad99-12a07bf52c29","Type":"ContainerStarted","Data":"1aa0b685686dc634453cff90f75a173ba011be8127f8338681ac868bba13b5a1"} Dec 11 22:07:13 crc kubenswrapper[4956]: I1211 22:07:13.927862 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82" event={"ID":"dfb081fe-bb0f-4b1b-ad99-12a07bf52c29","Type":"ContainerStarted","Data":"7c3c23342f532c238d4c37dd60d67470e10cfdda83a4ab7d64131e3c02d9aef7"} Dec 11 22:07:14 crc kubenswrapper[4956]: I1211 22:07:14.935289 4956 generic.go:334] "Generic (PLEG): container finished" podID="dfb081fe-bb0f-4b1b-ad99-12a07bf52c29" containerID="7c3c23342f532c238d4c37dd60d67470e10cfdda83a4ab7d64131e3c02d9aef7" exitCode=0 Dec 11 22:07:14 crc kubenswrapper[4956]: I1211 22:07:14.935503 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82" event={"ID":"dfb081fe-bb0f-4b1b-ad99-12a07bf52c29","Type":"ContainerDied","Data":"7c3c23342f532c238d4c37dd60d67470e10cfdda83a4ab7d64131e3c02d9aef7"} Dec 11 22:07:15 crc kubenswrapper[4956]: I1211 22:07:15.949589 4956 generic.go:334] "Generic (PLEG): container finished" podID="dfb081fe-bb0f-4b1b-ad99-12a07bf52c29" containerID="150fa85c7a562d69bd30dd76f05cfa76f8866184cbf6c77e0d3730301941dbfa" exitCode=0 Dec 11 22:07:15 crc kubenswrapper[4956]: I1211 22:07:15.949650 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82" event={"ID":"dfb081fe-bb0f-4b1b-ad99-12a07bf52c29","Type":"ContainerDied","Data":"150fa85c7a562d69bd30dd76f05cfa76f8866184cbf6c77e0d3730301941dbfa"} Dec 11 22:07:17 crc kubenswrapper[4956]: I1211 22:07:17.582650 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82" Dec 11 22:07:17 crc kubenswrapper[4956]: I1211 22:07:17.781063 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dfb081fe-bb0f-4b1b-ad99-12a07bf52c29-util\") pod \"dfb081fe-bb0f-4b1b-ad99-12a07bf52c29\" (UID: \"dfb081fe-bb0f-4b1b-ad99-12a07bf52c29\") " Dec 11 22:07:17 crc kubenswrapper[4956]: I1211 22:07:17.781123 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m9pps\" (UniqueName: \"kubernetes.io/projected/dfb081fe-bb0f-4b1b-ad99-12a07bf52c29-kube-api-access-m9pps\") pod \"dfb081fe-bb0f-4b1b-ad99-12a07bf52c29\" (UID: \"dfb081fe-bb0f-4b1b-ad99-12a07bf52c29\") " Dec 11 22:07:17 crc kubenswrapper[4956]: I1211 22:07:17.781171 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dfb081fe-bb0f-4b1b-ad99-12a07bf52c29-bundle\") pod \"dfb081fe-bb0f-4b1b-ad99-12a07bf52c29\" (UID: \"dfb081fe-bb0f-4b1b-ad99-12a07bf52c29\") " Dec 11 22:07:17 crc kubenswrapper[4956]: I1211 22:07:17.782247 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfb081fe-bb0f-4b1b-ad99-12a07bf52c29-bundle" (OuterVolumeSpecName: "bundle") pod "dfb081fe-bb0f-4b1b-ad99-12a07bf52c29" (UID: "dfb081fe-bb0f-4b1b-ad99-12a07bf52c29"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:07:17 crc kubenswrapper[4956]: I1211 22:07:17.782619 4956 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dfb081fe-bb0f-4b1b-ad99-12a07bf52c29-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 22:07:17 crc kubenswrapper[4956]: I1211 22:07:17.787162 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfb081fe-bb0f-4b1b-ad99-12a07bf52c29-kube-api-access-m9pps" (OuterVolumeSpecName: "kube-api-access-m9pps") pod "dfb081fe-bb0f-4b1b-ad99-12a07bf52c29" (UID: "dfb081fe-bb0f-4b1b-ad99-12a07bf52c29"). InnerVolumeSpecName "kube-api-access-m9pps". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:07:17 crc kubenswrapper[4956]: I1211 22:07:17.799137 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfb081fe-bb0f-4b1b-ad99-12a07bf52c29-util" (OuterVolumeSpecName: "util") pod "dfb081fe-bb0f-4b1b-ad99-12a07bf52c29" (UID: "dfb081fe-bb0f-4b1b-ad99-12a07bf52c29"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:07:17 crc kubenswrapper[4956]: I1211 22:07:17.883537 4956 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dfb081fe-bb0f-4b1b-ad99-12a07bf52c29-util\") on node \"crc\" DevicePath \"\"" Dec 11 22:07:17 crc kubenswrapper[4956]: I1211 22:07:17.883592 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m9pps\" (UniqueName: \"kubernetes.io/projected/dfb081fe-bb0f-4b1b-ad99-12a07bf52c29-kube-api-access-m9pps\") on node \"crc\" DevicePath \"\"" Dec 11 22:07:18 crc kubenswrapper[4956]: I1211 22:07:18.117259 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82" event={"ID":"dfb081fe-bb0f-4b1b-ad99-12a07bf52c29","Type":"ContainerDied","Data":"1aa0b685686dc634453cff90f75a173ba011be8127f8338681ac868bba13b5a1"} Dec 11 22:07:18 crc kubenswrapper[4956]: I1211 22:07:18.117332 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1aa0b685686dc634453cff90f75a173ba011be8127f8338681ac868bba13b5a1" Dec 11 22:07:18 crc kubenswrapper[4956]: I1211 22:07:18.117724 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82" Dec 11 22:07:18 crc kubenswrapper[4956]: I1211 22:07:18.892636 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/barbican-api-84dc9b8b44-tt7zq" Dec 11 22:07:18 crc kubenswrapper[4956]: I1211 22:07:18.956212 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/barbican-api-84dc9b8b44-tt7zq" Dec 11 22:07:18 crc kubenswrapper[4956]: I1211 22:07:18.977191 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/keystone-85866bffd9-vxw6k" Dec 11 22:07:31 crc kubenswrapper[4956]: I1211 22:07:31.554442 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-8946f6b66-758s2"] Dec 11 22:07:31 crc kubenswrapper[4956]: E1211 22:07:31.555434 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfb081fe-bb0f-4b1b-ad99-12a07bf52c29" containerName="extract" Dec 11 22:07:31 crc kubenswrapper[4956]: I1211 22:07:31.555455 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfb081fe-bb0f-4b1b-ad99-12a07bf52c29" containerName="extract" Dec 11 22:07:31 crc kubenswrapper[4956]: E1211 22:07:31.555479 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfb081fe-bb0f-4b1b-ad99-12a07bf52c29" containerName="util" Dec 11 22:07:31 crc kubenswrapper[4956]: I1211 22:07:31.555488 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfb081fe-bb0f-4b1b-ad99-12a07bf52c29" containerName="util" Dec 11 22:07:31 crc kubenswrapper[4956]: E1211 22:07:31.555509 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfb081fe-bb0f-4b1b-ad99-12a07bf52c29" containerName="pull" Dec 11 22:07:31 crc kubenswrapper[4956]: I1211 22:07:31.555518 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfb081fe-bb0f-4b1b-ad99-12a07bf52c29" containerName="pull" Dec 11 22:07:31 crc kubenswrapper[4956]: I1211 22:07:31.555663 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfb081fe-bb0f-4b1b-ad99-12a07bf52c29" containerName="extract" Dec 11 22:07:31 crc kubenswrapper[4956]: I1211 22:07:31.556297 4956 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-8946f6b66-758s2" Dec 11 22:07:31 crc kubenswrapper[4956]: I1211 22:07:31.559479 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-fc5xk" Dec 11 22:07:31 crc kubenswrapper[4956]: I1211 22:07:31.560075 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-service-cert" Dec 11 22:07:31 crc kubenswrapper[4956]: I1211 22:07:31.570852 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-8946f6b66-758s2"] Dec 11 22:07:31 crc kubenswrapper[4956]: I1211 22:07:31.624421 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7a35e2a8-1fe7-45be-8a6f-c1eab1761b13-apiservice-cert\") pod \"swift-operator-controller-manager-8946f6b66-758s2\" (UID: \"7a35e2a8-1fe7-45be-8a6f-c1eab1761b13\") " pod="openstack-operators/swift-operator-controller-manager-8946f6b66-758s2" Dec 11 22:07:31 crc kubenswrapper[4956]: I1211 22:07:31.624475 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7a35e2a8-1fe7-45be-8a6f-c1eab1761b13-webhook-cert\") pod \"swift-operator-controller-manager-8946f6b66-758s2\" (UID: \"7a35e2a8-1fe7-45be-8a6f-c1eab1761b13\") " pod="openstack-operators/swift-operator-controller-manager-8946f6b66-758s2" Dec 11 22:07:31 crc kubenswrapper[4956]: I1211 22:07:31.624570 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4qd5\" (UniqueName: \"kubernetes.io/projected/7a35e2a8-1fe7-45be-8a6f-c1eab1761b13-kube-api-access-w4qd5\") pod \"swift-operator-controller-manager-8946f6b66-758s2\" (UID: \"7a35e2a8-1fe7-45be-8a6f-c1eab1761b13\") " pod="openstack-operators/swift-operator-controller-manager-8946f6b66-758s2" Dec 11 22:07:31 crc kubenswrapper[4956]: I1211 22:07:31.726102 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7a35e2a8-1fe7-45be-8a6f-c1eab1761b13-apiservice-cert\") pod \"swift-operator-controller-manager-8946f6b66-758s2\" (UID: \"7a35e2a8-1fe7-45be-8a6f-c1eab1761b13\") " pod="openstack-operators/swift-operator-controller-manager-8946f6b66-758s2" Dec 11 22:07:31 crc kubenswrapper[4956]: I1211 22:07:31.726155 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7a35e2a8-1fe7-45be-8a6f-c1eab1761b13-webhook-cert\") pod \"swift-operator-controller-manager-8946f6b66-758s2\" (UID: \"7a35e2a8-1fe7-45be-8a6f-c1eab1761b13\") " pod="openstack-operators/swift-operator-controller-manager-8946f6b66-758s2" Dec 11 22:07:31 crc kubenswrapper[4956]: I1211 22:07:31.726214 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4qd5\" (UniqueName: \"kubernetes.io/projected/7a35e2a8-1fe7-45be-8a6f-c1eab1761b13-kube-api-access-w4qd5\") pod \"swift-operator-controller-manager-8946f6b66-758s2\" (UID: \"7a35e2a8-1fe7-45be-8a6f-c1eab1761b13\") " pod="openstack-operators/swift-operator-controller-manager-8946f6b66-758s2" Dec 11 22:07:31 crc kubenswrapper[4956]: I1211 22:07:31.731356 4956 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7a35e2a8-1fe7-45be-8a6f-c1eab1761b13-apiservice-cert\") pod \"swift-operator-controller-manager-8946f6b66-758s2\" (UID: \"7a35e2a8-1fe7-45be-8a6f-c1eab1761b13\") " pod="openstack-operators/swift-operator-controller-manager-8946f6b66-758s2" Dec 11 22:07:31 crc kubenswrapper[4956]: I1211 22:07:31.735734 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7a35e2a8-1fe7-45be-8a6f-c1eab1761b13-webhook-cert\") pod \"swift-operator-controller-manager-8946f6b66-758s2\" (UID: \"7a35e2a8-1fe7-45be-8a6f-c1eab1761b13\") " pod="openstack-operators/swift-operator-controller-manager-8946f6b66-758s2" Dec 11 22:07:31 crc kubenswrapper[4956]: I1211 22:07:31.754087 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4qd5\" (UniqueName: \"kubernetes.io/projected/7a35e2a8-1fe7-45be-8a6f-c1eab1761b13-kube-api-access-w4qd5\") pod \"swift-operator-controller-manager-8946f6b66-758s2\" (UID: \"7a35e2a8-1fe7-45be-8a6f-c1eab1761b13\") " pod="openstack-operators/swift-operator-controller-manager-8946f6b66-758s2" Dec 11 22:07:31 crc kubenswrapper[4956]: I1211 22:07:31.878378 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-8946f6b66-758s2" Dec 11 22:07:32 crc kubenswrapper[4956]: I1211 22:07:32.424453 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-8946f6b66-758s2"] Dec 11 22:07:33 crc kubenswrapper[4956]: I1211 22:07:33.396121 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-8946f6b66-758s2" event={"ID":"7a35e2a8-1fe7-45be-8a6f-c1eab1761b13","Type":"ContainerStarted","Data":"1b374219a27fef1f1ac132bb72d84a0f97e3e4388715b6dca554c291e1531e84"} Dec 11 22:07:36 crc kubenswrapper[4956]: I1211 22:07:36.420572 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-8946f6b66-758s2" event={"ID":"7a35e2a8-1fe7-45be-8a6f-c1eab1761b13","Type":"ContainerStarted","Data":"4a898e2fa3d5fe1992c85c1ef59106d325f73b829a50d5ff1e0a4501f4d06dc9"} Dec 11 22:07:36 crc kubenswrapper[4956]: I1211 22:07:36.421300 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-8946f6b66-758s2" Dec 11 22:07:36 crc kubenswrapper[4956]: I1211 22:07:36.442447 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-8946f6b66-758s2" podStartSLOduration=2.420273537 podStartE2EDuration="5.442420086s" podCreationTimestamp="2025-12-11 22:07:31 +0000 UTC" firstStartedPulling="2025-12-11 22:07:32.430177049 +0000 UTC m=+1144.874555199" lastFinishedPulling="2025-12-11 22:07:35.452323588 +0000 UTC m=+1147.896701748" observedRunningTime="2025-12-11 22:07:36.440188666 +0000 UTC m=+1148.884566816" watchObservedRunningTime="2025-12-11 22:07:36.442420086 +0000 UTC m=+1148.886798236" Dec 11 22:07:41 crc kubenswrapper[4956]: I1211 22:07:41.884224 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-8946f6b66-758s2" Dec 11 22:07:44 crc kubenswrapper[4956]: I1211 22:07:44.582971 4956 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["swift-kuttl-tests/swift-storage-0"] Dec 11 22:07:44 crc kubenswrapper[4956]: I1211 22:07:44.588268 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:07:44 crc kubenswrapper[4956]: I1211 22:07:44.591287 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-conf" Dec 11 22:07:44 crc kubenswrapper[4956]: I1211 22:07:44.591689 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-storage-config-data" Dec 11 22:07:44 crc kubenswrapper[4956]: I1211 22:07:44.591692 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-files" Dec 11 22:07:44 crc kubenswrapper[4956]: I1211 22:07:44.593013 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-swift-dockercfg-vhnmw" Dec 11 22:07:44 crc kubenswrapper[4956]: I1211 22:07:44.608599 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 11 22:07:44 crc kubenswrapper[4956]: I1211 22:07:44.740819 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-etc-swift\") pod \"swift-storage-0\" (UID: \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:07:44 crc kubenswrapper[4956]: I1211 22:07:44.740894 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:07:44 crc kubenswrapper[4956]: I1211 22:07:44.740954 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45jfd\" (UniqueName: \"kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-kube-api-access-45jfd\") pod \"swift-storage-0\" (UID: \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:07:44 crc kubenswrapper[4956]: I1211 22:07:44.740978 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/a114c533-fe14-41f3-b4fc-6431a48cdfc9-cache\") pod \"swift-storage-0\" (UID: \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:07:44 crc kubenswrapper[4956]: I1211 22:07:44.741033 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/a114c533-fe14-41f3-b4fc-6431a48cdfc9-lock\") pod \"swift-storage-0\" (UID: \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:07:44 crc kubenswrapper[4956]: I1211 22:07:44.842659 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-etc-swift\") pod \"swift-storage-0\" (UID: \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:07:44 crc kubenswrapper[4956]: I1211 22:07:44.842736 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:07:44 crc kubenswrapper[4956]: I1211 22:07:44.842822 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45jfd\" (UniqueName: \"kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-kube-api-access-45jfd\") pod \"swift-storage-0\" (UID: \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:07:44 crc kubenswrapper[4956]: I1211 22:07:44.842848 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/a114c533-fe14-41f3-b4fc-6431a48cdfc9-cache\") pod \"swift-storage-0\" (UID: \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:07:44 crc kubenswrapper[4956]: I1211 22:07:44.842903 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/a114c533-fe14-41f3-b4fc-6431a48cdfc9-lock\") pod \"swift-storage-0\" (UID: \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:07:44 crc kubenswrapper[4956]: E1211 22:07:44.843354 4956 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 11 22:07:44 crc kubenswrapper[4956]: E1211 22:07:44.843407 4956 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 11 22:07:44 crc kubenswrapper[4956]: E1211 22:07:44.843471 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-etc-swift podName:a114c533-fe14-41f3-b4fc-6431a48cdfc9 nodeName:}" failed. No retries permitted until 2025-12-11 22:07:45.343446706 +0000 UTC m=+1157.787824926 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-etc-swift") pod "swift-storage-0" (UID: "a114c533-fe14-41f3-b4fc-6431a48cdfc9") : configmap "swift-ring-files" not found Dec 11 22:07:44 crc kubenswrapper[4956]: I1211 22:07:44.843605 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/a114c533-fe14-41f3-b4fc-6431a48cdfc9-lock\") pod \"swift-storage-0\" (UID: \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:07:44 crc kubenswrapper[4956]: I1211 22:07:44.844207 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/a114c533-fe14-41f3-b4fc-6431a48cdfc9-cache\") pod \"swift-storage-0\" (UID: \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:07:44 crc kubenswrapper[4956]: I1211 22:07:44.844372 4956 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") device mount path \"/mnt/openstack/pv07\"" pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:07:44 crc kubenswrapper[4956]: I1211 22:07:44.866162 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45jfd\" (UniqueName: \"kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-kube-api-access-45jfd\") pod \"swift-storage-0\" (UID: \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:07:44 crc kubenswrapper[4956]: I1211 22:07:44.876658 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.142370 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-9xv55"] Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.143570 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.145609 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-proxy-config-data" Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.146424 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts" Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.149187 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data" Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.167459 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-9xv55"] Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.248346 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0545a046-63cb-45bb-b8e8-01e4d8526113-scripts\") pod \"swift-ring-rebalance-9xv55\" (UID: \"0545a046-63cb-45bb-b8e8-01e4d8526113\") " pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.248732 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0545a046-63cb-45bb-b8e8-01e4d8526113-ring-data-devices\") pod \"swift-ring-rebalance-9xv55\" (UID: \"0545a046-63cb-45bb-b8e8-01e4d8526113\") " pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.248823 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rdwn\" (UniqueName: \"kubernetes.io/projected/0545a046-63cb-45bb-b8e8-01e4d8526113-kube-api-access-4rdwn\") pod \"swift-ring-rebalance-9xv55\" (UID: \"0545a046-63cb-45bb-b8e8-01e4d8526113\") " pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.248878 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0545a046-63cb-45bb-b8e8-01e4d8526113-swiftconf\") pod \"swift-ring-rebalance-9xv55\" (UID: \"0545a046-63cb-45bb-b8e8-01e4d8526113\") " pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.248906 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0545a046-63cb-45bb-b8e8-01e4d8526113-dispersionconf\") pod \"swift-ring-rebalance-9xv55\" (UID: \"0545a046-63cb-45bb-b8e8-01e4d8526113\") " pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.248950 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0545a046-63cb-45bb-b8e8-01e4d8526113-etc-swift\") pod \"swift-ring-rebalance-9xv55\" (UID: \"0545a046-63cb-45bb-b8e8-01e4d8526113\") " pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.350612 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0545a046-63cb-45bb-b8e8-01e4d8526113-scripts\") pod \"swift-ring-rebalance-9xv55\" (UID: \"0545a046-63cb-45bb-b8e8-01e4d8526113\") " 
pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.351004 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0545a046-63cb-45bb-b8e8-01e4d8526113-ring-data-devices\") pod \"swift-ring-rebalance-9xv55\" (UID: \"0545a046-63cb-45bb-b8e8-01e4d8526113\") " pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.351171 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rdwn\" (UniqueName: \"kubernetes.io/projected/0545a046-63cb-45bb-b8e8-01e4d8526113-kube-api-access-4rdwn\") pod \"swift-ring-rebalance-9xv55\" (UID: \"0545a046-63cb-45bb-b8e8-01e4d8526113\") " pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.351311 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-etc-swift\") pod \"swift-storage-0\" (UID: \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.351434 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0545a046-63cb-45bb-b8e8-01e4d8526113-swiftconf\") pod \"swift-ring-rebalance-9xv55\" (UID: \"0545a046-63cb-45bb-b8e8-01e4d8526113\") " pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.351509 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0545a046-63cb-45bb-b8e8-01e4d8526113-scripts\") pod \"swift-ring-rebalance-9xv55\" (UID: \"0545a046-63cb-45bb-b8e8-01e4d8526113\") " pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" Dec 11 22:07:45 crc kubenswrapper[4956]: E1211 22:07:45.351592 4956 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 11 22:07:45 crc kubenswrapper[4956]: E1211 22:07:45.351633 4956 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.351755 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0545a046-63cb-45bb-b8e8-01e4d8526113-dispersionconf\") pod \"swift-ring-rebalance-9xv55\" (UID: \"0545a046-63cb-45bb-b8e8-01e4d8526113\") " pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.351942 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0545a046-63cb-45bb-b8e8-01e4d8526113-etc-swift\") pod \"swift-ring-rebalance-9xv55\" (UID: \"0545a046-63cb-45bb-b8e8-01e4d8526113\") " pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" Dec 11 22:07:45 crc kubenswrapper[4956]: E1211 22:07:45.352155 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-etc-swift podName:a114c533-fe14-41f3-b4fc-6431a48cdfc9 nodeName:}" failed. No retries permitted until 2025-12-11 22:07:46.352103245 +0000 UTC m=+1158.796481395 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-etc-swift") pod "swift-storage-0" (UID: "a114c533-fe14-41f3-b4fc-6431a48cdfc9") : configmap "swift-ring-files" not found Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.352298 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0545a046-63cb-45bb-b8e8-01e4d8526113-ring-data-devices\") pod \"swift-ring-rebalance-9xv55\" (UID: \"0545a046-63cb-45bb-b8e8-01e4d8526113\") " pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.352788 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0545a046-63cb-45bb-b8e8-01e4d8526113-etc-swift\") pod \"swift-ring-rebalance-9xv55\" (UID: \"0545a046-63cb-45bb-b8e8-01e4d8526113\") " pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.356941 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0545a046-63cb-45bb-b8e8-01e4d8526113-swiftconf\") pod \"swift-ring-rebalance-9xv55\" (UID: \"0545a046-63cb-45bb-b8e8-01e4d8526113\") " pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.367861 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0545a046-63cb-45bb-b8e8-01e4d8526113-dispersionconf\") pod \"swift-ring-rebalance-9xv55\" (UID: \"0545a046-63cb-45bb-b8e8-01e4d8526113\") " pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.369217 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rdwn\" (UniqueName: \"kubernetes.io/projected/0545a046-63cb-45bb-b8e8-01e4d8526113-kube-api-access-4rdwn\") pod \"swift-ring-rebalance-9xv55\" (UID: \"0545a046-63cb-45bb-b8e8-01e4d8526113\") " pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" Dec 11 22:07:45 crc kubenswrapper[4956]: I1211 22:07:45.461592 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" Dec 11 22:07:46 crc kubenswrapper[4956]: I1211 22:07:46.085309 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-9xv55"] Dec 11 22:07:46 crc kubenswrapper[4956]: I1211 22:07:46.235066 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj"] Dec 11 22:07:46 crc kubenswrapper[4956]: I1211 22:07:46.236290 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:07:46 crc kubenswrapper[4956]: I1211 22:07:46.252447 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj"] Dec 11 22:07:46 crc kubenswrapper[4956]: I1211 22:07:46.376063 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-etc-swift\") pod \"swift-proxy-7d4fb88647-tslmj\" (UID: \"5e222ed6-506d-4466-85c0-6e6354f42d68\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:07:46 crc kubenswrapper[4956]: I1211 22:07:46.376165 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e222ed6-506d-4466-85c0-6e6354f42d68-log-httpd\") pod \"swift-proxy-7d4fb88647-tslmj\" (UID: \"5e222ed6-506d-4466-85c0-6e6354f42d68\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:07:46 crc kubenswrapper[4956]: I1211 22:07:46.376193 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e222ed6-506d-4466-85c0-6e6354f42d68-run-httpd\") pod \"swift-proxy-7d4fb88647-tslmj\" (UID: \"5e222ed6-506d-4466-85c0-6e6354f42d68\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:07:46 crc kubenswrapper[4956]: I1211 22:07:46.376222 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n96rr\" (UniqueName: \"kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-kube-api-access-n96rr\") pod \"swift-proxy-7d4fb88647-tslmj\" (UID: \"5e222ed6-506d-4466-85c0-6e6354f42d68\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:07:46 crc kubenswrapper[4956]: I1211 22:07:46.376437 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-etc-swift\") pod \"swift-storage-0\" (UID: \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:07:46 crc kubenswrapper[4956]: I1211 22:07:46.376528 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e222ed6-506d-4466-85c0-6e6354f42d68-config-data\") pod \"swift-proxy-7d4fb88647-tslmj\" (UID: \"5e222ed6-506d-4466-85c0-6e6354f42d68\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:07:46 crc kubenswrapper[4956]: E1211 22:07:46.376605 4956 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 11 22:07:46 crc kubenswrapper[4956]: E1211 22:07:46.376631 4956 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 11 22:07:46 crc kubenswrapper[4956]: E1211 22:07:46.376788 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-etc-swift podName:a114c533-fe14-41f3-b4fc-6431a48cdfc9 nodeName:}" failed. No retries permitted until 2025-12-11 22:07:48.376746739 +0000 UTC m=+1160.821124889 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-etc-swift") pod "swift-storage-0" (UID: "a114c533-fe14-41f3-b4fc-6431a48cdfc9") : configmap "swift-ring-files" not found Dec 11 22:07:46 crc kubenswrapper[4956]: I1211 22:07:46.483480 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e222ed6-506d-4466-85c0-6e6354f42d68-log-httpd\") pod \"swift-proxy-7d4fb88647-tslmj\" (UID: \"5e222ed6-506d-4466-85c0-6e6354f42d68\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:07:46 crc kubenswrapper[4956]: I1211 22:07:46.483534 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e222ed6-506d-4466-85c0-6e6354f42d68-run-httpd\") pod \"swift-proxy-7d4fb88647-tslmj\" (UID: \"5e222ed6-506d-4466-85c0-6e6354f42d68\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:07:46 crc kubenswrapper[4956]: I1211 22:07:46.483569 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n96rr\" (UniqueName: \"kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-kube-api-access-n96rr\") pod \"swift-proxy-7d4fb88647-tslmj\" (UID: \"5e222ed6-506d-4466-85c0-6e6354f42d68\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:07:46 crc kubenswrapper[4956]: I1211 22:07:46.483641 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e222ed6-506d-4466-85c0-6e6354f42d68-config-data\") pod \"swift-proxy-7d4fb88647-tslmj\" (UID: \"5e222ed6-506d-4466-85c0-6e6354f42d68\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:07:46 crc kubenswrapper[4956]: I1211 22:07:46.483698 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-etc-swift\") pod \"swift-proxy-7d4fb88647-tslmj\" (UID: \"5e222ed6-506d-4466-85c0-6e6354f42d68\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:07:46 crc kubenswrapper[4956]: E1211 22:07:46.483885 4956 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 11 22:07:46 crc kubenswrapper[4956]: E1211 22:07:46.483901 4956 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj: configmap "swift-ring-files" not found Dec 11 22:07:46 crc kubenswrapper[4956]: E1211 22:07:46.483951 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-etc-swift podName:5e222ed6-506d-4466-85c0-6e6354f42d68 nodeName:}" failed. No retries permitted until 2025-12-11 22:07:46.983931924 +0000 UTC m=+1159.428310074 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-etc-swift") pod "swift-proxy-7d4fb88647-tslmj" (UID: "5e222ed6-506d-4466-85c0-6e6354f42d68") : configmap "swift-ring-files" not found Dec 11 22:07:46 crc kubenswrapper[4956]: I1211 22:07:46.484129 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e222ed6-506d-4466-85c0-6e6354f42d68-run-httpd\") pod \"swift-proxy-7d4fb88647-tslmj\" (UID: \"5e222ed6-506d-4466-85c0-6e6354f42d68\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:07:46 crc kubenswrapper[4956]: I1211 22:07:46.484195 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e222ed6-506d-4466-85c0-6e6354f42d68-log-httpd\") pod \"swift-proxy-7d4fb88647-tslmj\" (UID: \"5e222ed6-506d-4466-85c0-6e6354f42d68\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:07:46 crc kubenswrapper[4956]: I1211 22:07:46.649382 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e222ed6-506d-4466-85c0-6e6354f42d68-config-data\") pod \"swift-proxy-7d4fb88647-tslmj\" (UID: \"5e222ed6-506d-4466-85c0-6e6354f42d68\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:07:46 crc kubenswrapper[4956]: I1211 22:07:46.664532 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n96rr\" (UniqueName: \"kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-kube-api-access-n96rr\") pod \"swift-proxy-7d4fb88647-tslmj\" (UID: \"5e222ed6-506d-4466-85c0-6e6354f42d68\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:07:46 crc kubenswrapper[4956]: I1211 22:07:46.674340 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" event={"ID":"0545a046-63cb-45bb-b8e8-01e4d8526113","Type":"ContainerStarted","Data":"ea4a3daa4e4c0412a3d2da50bd6d0efc137671c64cbd56185d18b4af2f0f82d5"} Dec 11 22:07:47 crc kubenswrapper[4956]: I1211 22:07:47.054870 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-etc-swift\") pod \"swift-proxy-7d4fb88647-tslmj\" (UID: \"5e222ed6-506d-4466-85c0-6e6354f42d68\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:07:47 crc kubenswrapper[4956]: E1211 22:07:47.055060 4956 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 11 22:07:47 crc kubenswrapper[4956]: E1211 22:07:47.055106 4956 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj: configmap "swift-ring-files" not found Dec 11 22:07:47 crc kubenswrapper[4956]: E1211 22:07:47.055183 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-etc-swift podName:5e222ed6-506d-4466-85c0-6e6354f42d68 nodeName:}" failed. No retries permitted until 2025-12-11 22:07:48.055164748 +0000 UTC m=+1160.499542898 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-etc-swift") pod "swift-proxy-7d4fb88647-tslmj" (UID: "5e222ed6-506d-4466-85c0-6e6354f42d68") : configmap "swift-ring-files" not found Dec 11 22:07:48 crc kubenswrapper[4956]: I1211 22:07:48.069051 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-etc-swift\") pod \"swift-proxy-7d4fb88647-tslmj\" (UID: \"5e222ed6-506d-4466-85c0-6e6354f42d68\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:07:48 crc kubenswrapper[4956]: E1211 22:07:48.069255 4956 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 11 22:07:48 crc kubenswrapper[4956]: E1211 22:07:48.069281 4956 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj: configmap "swift-ring-files" not found Dec 11 22:07:48 crc kubenswrapper[4956]: E1211 22:07:48.069343 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-etc-swift podName:5e222ed6-506d-4466-85c0-6e6354f42d68 nodeName:}" failed. No retries permitted until 2025-12-11 22:07:50.069323918 +0000 UTC m=+1162.513702068 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-etc-swift") pod "swift-proxy-7d4fb88647-tslmj" (UID: "5e222ed6-506d-4466-85c0-6e6354f42d68") : configmap "swift-ring-files" not found Dec 11 22:07:48 crc kubenswrapper[4956]: I1211 22:07:48.474502 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-etc-swift\") pod \"swift-storage-0\" (UID: \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:07:48 crc kubenswrapper[4956]: E1211 22:07:48.474728 4956 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 11 22:07:48 crc kubenswrapper[4956]: E1211 22:07:48.474780 4956 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 11 22:07:48 crc kubenswrapper[4956]: E1211 22:07:48.474853 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-etc-swift podName:a114c533-fe14-41f3-b4fc-6431a48cdfc9 nodeName:}" failed. No retries permitted until 2025-12-11 22:07:52.4748312 +0000 UTC m=+1164.919209410 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-etc-swift") pod "swift-storage-0" (UID: "a114c533-fe14-41f3-b4fc-6431a48cdfc9") : configmap "swift-ring-files" not found Dec 11 22:07:50 crc kubenswrapper[4956]: I1211 22:07:50.098824 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-etc-swift\") pod \"swift-proxy-7d4fb88647-tslmj\" (UID: \"5e222ed6-506d-4466-85c0-6e6354f42d68\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:07:50 crc kubenswrapper[4956]: E1211 22:07:50.098979 4956 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 11 22:07:50 crc kubenswrapper[4956]: E1211 22:07:50.099248 4956 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj: configmap "swift-ring-files" not found Dec 11 22:07:50 crc kubenswrapper[4956]: E1211 22:07:50.099298 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-etc-swift podName:5e222ed6-506d-4466-85c0-6e6354f42d68 nodeName:}" failed. No retries permitted until 2025-12-11 22:07:54.099281092 +0000 UTC m=+1166.543659242 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-etc-swift") pod "swift-proxy-7d4fb88647-tslmj" (UID: "5e222ed6-506d-4466-85c0-6e6354f42d68") : configmap "swift-ring-files" not found Dec 11 22:07:51 crc kubenswrapper[4956]: I1211 22:07:51.746308 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" event={"ID":"0545a046-63cb-45bb-b8e8-01e4d8526113","Type":"ContainerStarted","Data":"d1b3e1eae3a6852d757d436c0870da719cfaa0fe8df3d251bf5d0595bd07f739"} Dec 11 22:07:51 crc kubenswrapper[4956]: I1211 22:07:51.783737 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" podStartSLOduration=1.874461009 podStartE2EDuration="6.783715001s" podCreationTimestamp="2025-12-11 22:07:45 +0000 UTC" firstStartedPulling="2025-12-11 22:07:46.096340868 +0000 UTC m=+1158.540719028" lastFinishedPulling="2025-12-11 22:07:51.00559487 +0000 UTC m=+1163.449973020" observedRunningTime="2025-12-11 22:07:51.780739861 +0000 UTC m=+1164.225118011" watchObservedRunningTime="2025-12-11 22:07:51.783715001 +0000 UTC m=+1164.228093181" Dec 11 22:07:52 crc kubenswrapper[4956]: I1211 22:07:52.548649 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-etc-swift\") pod \"swift-storage-0\" (UID: \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:07:52 crc kubenswrapper[4956]: E1211 22:07:52.548840 4956 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 11 22:07:52 crc kubenswrapper[4956]: E1211 22:07:52.549114 4956 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 11 22:07:52 crc kubenswrapper[4956]: E1211 22:07:52.549175 4956 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-etc-swift podName:a114c533-fe14-41f3-b4fc-6431a48cdfc9 nodeName:}" failed. No retries permitted until 2025-12-11 22:08:00.54915489 +0000 UTC m=+1172.993533040 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-etc-swift") pod "swift-storage-0" (UID: "a114c533-fe14-41f3-b4fc-6431a48cdfc9") : configmap "swift-ring-files" not found Dec 11 22:07:54 crc kubenswrapper[4956]: I1211 22:07:54.171501 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-etc-swift\") pod \"swift-proxy-7d4fb88647-tslmj\" (UID: \"5e222ed6-506d-4466-85c0-6e6354f42d68\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:07:54 crc kubenswrapper[4956]: E1211 22:07:54.171641 4956 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 11 22:07:54 crc kubenswrapper[4956]: E1211 22:07:54.171655 4956 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj: configmap "swift-ring-files" not found Dec 11 22:07:54 crc kubenswrapper[4956]: E1211 22:07:54.171702 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-etc-swift podName:5e222ed6-506d-4466-85c0-6e6354f42d68 nodeName:}" failed. No retries permitted until 2025-12-11 22:08:02.171689212 +0000 UTC m=+1174.616067362 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-etc-swift") pod "swift-proxy-7d4fb88647-tslmj" (UID: "5e222ed6-506d-4466-85c0-6e6354f42d68") : configmap "swift-ring-files" not found Dec 11 22:07:58 crc kubenswrapper[4956]: I1211 22:07:58.797219 4956 generic.go:334] "Generic (PLEG): container finished" podID="0545a046-63cb-45bb-b8e8-01e4d8526113" containerID="d1b3e1eae3a6852d757d436c0870da719cfaa0fe8df3d251bf5d0595bd07f739" exitCode=0 Dec 11 22:07:58 crc kubenswrapper[4956]: I1211 22:07:58.797318 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" event={"ID":"0545a046-63cb-45bb-b8e8-01e4d8526113","Type":"ContainerDied","Data":"d1b3e1eae3a6852d757d436c0870da719cfaa0fe8df3d251bf5d0595bd07f739"} Dec 11 22:08:00 crc kubenswrapper[4956]: I1211 22:08:00.117048 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" Dec 11 22:08:00 crc kubenswrapper[4956]: I1211 22:08:00.267499 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0545a046-63cb-45bb-b8e8-01e4d8526113-swiftconf\") pod \"0545a046-63cb-45bb-b8e8-01e4d8526113\" (UID: \"0545a046-63cb-45bb-b8e8-01e4d8526113\") " Dec 11 22:08:00 crc kubenswrapper[4956]: I1211 22:08:00.267920 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0545a046-63cb-45bb-b8e8-01e4d8526113-etc-swift\") pod \"0545a046-63cb-45bb-b8e8-01e4d8526113\" (UID: \"0545a046-63cb-45bb-b8e8-01e4d8526113\") " Dec 11 22:08:00 crc kubenswrapper[4956]: I1211 22:08:00.267967 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0545a046-63cb-45bb-b8e8-01e4d8526113-dispersionconf\") pod \"0545a046-63cb-45bb-b8e8-01e4d8526113\" (UID: \"0545a046-63cb-45bb-b8e8-01e4d8526113\") " Dec 11 22:08:00 crc kubenswrapper[4956]: I1211 22:08:00.268028 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rdwn\" (UniqueName: \"kubernetes.io/projected/0545a046-63cb-45bb-b8e8-01e4d8526113-kube-api-access-4rdwn\") pod \"0545a046-63cb-45bb-b8e8-01e4d8526113\" (UID: \"0545a046-63cb-45bb-b8e8-01e4d8526113\") " Dec 11 22:08:00 crc kubenswrapper[4956]: I1211 22:08:00.268097 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0545a046-63cb-45bb-b8e8-01e4d8526113-scripts\") pod \"0545a046-63cb-45bb-b8e8-01e4d8526113\" (UID: \"0545a046-63cb-45bb-b8e8-01e4d8526113\") " Dec 11 22:08:00 crc kubenswrapper[4956]: I1211 22:08:00.268118 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0545a046-63cb-45bb-b8e8-01e4d8526113-ring-data-devices\") pod \"0545a046-63cb-45bb-b8e8-01e4d8526113\" (UID: \"0545a046-63cb-45bb-b8e8-01e4d8526113\") " Dec 11 22:08:00 crc kubenswrapper[4956]: I1211 22:08:00.269069 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0545a046-63cb-45bb-b8e8-01e4d8526113-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "0545a046-63cb-45bb-b8e8-01e4d8526113" (UID: "0545a046-63cb-45bb-b8e8-01e4d8526113"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:08:00 crc kubenswrapper[4956]: I1211 22:08:00.269296 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0545a046-63cb-45bb-b8e8-01e4d8526113-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "0545a046-63cb-45bb-b8e8-01e4d8526113" (UID: "0545a046-63cb-45bb-b8e8-01e4d8526113"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 22:08:00 crc kubenswrapper[4956]: I1211 22:08:00.275880 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0545a046-63cb-45bb-b8e8-01e4d8526113-kube-api-access-4rdwn" (OuterVolumeSpecName: "kube-api-access-4rdwn") pod "0545a046-63cb-45bb-b8e8-01e4d8526113" (UID: "0545a046-63cb-45bb-b8e8-01e4d8526113"). InnerVolumeSpecName "kube-api-access-4rdwn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:08:00 crc kubenswrapper[4956]: I1211 22:08:00.289038 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0545a046-63cb-45bb-b8e8-01e4d8526113-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "0545a046-63cb-45bb-b8e8-01e4d8526113" (UID: "0545a046-63cb-45bb-b8e8-01e4d8526113"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 22:08:00 crc kubenswrapper[4956]: I1211 22:08:00.289711 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0545a046-63cb-45bb-b8e8-01e4d8526113-scripts" (OuterVolumeSpecName: "scripts") pod "0545a046-63cb-45bb-b8e8-01e4d8526113" (UID: "0545a046-63cb-45bb-b8e8-01e4d8526113"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 22:08:00 crc kubenswrapper[4956]: I1211 22:08:00.302634 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0545a046-63cb-45bb-b8e8-01e4d8526113-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "0545a046-63cb-45bb-b8e8-01e4d8526113" (UID: "0545a046-63cb-45bb-b8e8-01e4d8526113"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 22:08:00 crc kubenswrapper[4956]: I1211 22:08:00.369521 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rdwn\" (UniqueName: \"kubernetes.io/projected/0545a046-63cb-45bb-b8e8-01e4d8526113-kube-api-access-4rdwn\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:00 crc kubenswrapper[4956]: I1211 22:08:00.369555 4956 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0545a046-63cb-45bb-b8e8-01e4d8526113-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:00 crc kubenswrapper[4956]: I1211 22:08:00.369564 4956 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0545a046-63cb-45bb-b8e8-01e4d8526113-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:00 crc kubenswrapper[4956]: I1211 22:08:00.369572 4956 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0545a046-63cb-45bb-b8e8-01e4d8526113-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:00 crc kubenswrapper[4956]: I1211 22:08:00.369581 4956 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0545a046-63cb-45bb-b8e8-01e4d8526113-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:00 crc kubenswrapper[4956]: I1211 22:08:00.369589 4956 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0545a046-63cb-45bb-b8e8-01e4d8526113-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:00 crc kubenswrapper[4956]: I1211 22:08:00.572466 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-etc-swift\") pod \"swift-storage-0\" (UID: \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:08:00 crc kubenswrapper[4956]: I1211 22:08:00.576475 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-etc-swift\") pod \"swift-storage-0\" (UID: 
\"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:08:00 crc kubenswrapper[4956]: I1211 22:08:00.808149 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:08:00 crc kubenswrapper[4956]: I1211 22:08:00.833669 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" event={"ID":"0545a046-63cb-45bb-b8e8-01e4d8526113","Type":"ContainerDied","Data":"ea4a3daa4e4c0412a3d2da50bd6d0efc137671c64cbd56185d18b4af2f0f82d5"} Dec 11 22:08:00 crc kubenswrapper[4956]: I1211 22:08:00.833720 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea4a3daa4e4c0412a3d2da50bd6d0efc137671c64cbd56185d18b4af2f0f82d5" Dec 11 22:08:00 crc kubenswrapper[4956]: I1211 22:08:00.833800 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-9xv55" Dec 11 22:08:01 crc kubenswrapper[4956]: I1211 22:08:01.106630 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-9xv55_0545a046-63cb-45bb-b8e8-01e4d8526113/swift-ring-rebalance/0.log" Dec 11 22:08:01 crc kubenswrapper[4956]: I1211 22:08:01.236819 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 11 22:08:01 crc kubenswrapper[4956]: I1211 22:08:01.843687 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerStarted","Data":"579e5219991bb2beaa43e6e4fa094f5956d3f649392f2970a5d196f44e23db02"} Dec 11 22:08:02 crc kubenswrapper[4956]: I1211 22:08:02.198407 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-etc-swift\") pod \"swift-proxy-7d4fb88647-tslmj\" (UID: \"5e222ed6-506d-4466-85c0-6e6354f42d68\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:08:02 crc kubenswrapper[4956]: I1211 22:08:02.203839 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-etc-swift\") pod \"swift-proxy-7d4fb88647-tslmj\" (UID: \"5e222ed6-506d-4466-85c0-6e6354f42d68\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:08:02 crc kubenswrapper[4956]: I1211 22:08:02.452692 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:08:02 crc kubenswrapper[4956]: I1211 22:08:02.831557 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-9xv55_0545a046-63cb-45bb-b8e8-01e4d8526113/swift-ring-rebalance/0.log" Dec 11 22:08:02 crc kubenswrapper[4956]: I1211 22:08:02.860345 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerStarted","Data":"f59b79b6d8c86bb83416f175bb80a0cdddbf06ca5094679bf78e941c18750b8c"} Dec 11 22:08:02 crc kubenswrapper[4956]: I1211 22:08:02.860395 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerStarted","Data":"3b1b9e20037c7e86183cad618af5b2ebd5dad2173b0e829a17a9fd9ed798ebda"} Dec 11 22:08:02 crc kubenswrapper[4956]: I1211 22:08:02.980829 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj"] Dec 11 22:08:03 crc kubenswrapper[4956]: I1211 22:08:03.868424 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" event={"ID":"5e222ed6-506d-4466-85c0-6e6354f42d68","Type":"ContainerStarted","Data":"72c0985dfe46963c43bb2ceb143217bab3a2ca4c734ba40d5698d922f2c963e3"} Dec 11 22:08:03 crc kubenswrapper[4956]: I1211 22:08:03.868797 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" event={"ID":"5e222ed6-506d-4466-85c0-6e6354f42d68","Type":"ContainerStarted","Data":"716e94e5438f3e259debb370da0bb501f7a807af46f452383dcc4dd68e3a0b3c"} Dec 11 22:08:03 crc kubenswrapper[4956]: I1211 22:08:03.870406 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerStarted","Data":"5fa3011ef640c6b3cb258381deb019de5b934f0bde4e0a8db62c222b53729fa2"} Dec 11 22:08:03 crc kubenswrapper[4956]: I1211 22:08:03.870453 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerStarted","Data":"967bb04f5adac6da7467c43c1e7c07d69287cc4f6152349931e8523be8e8f9e7"} Dec 11 22:08:04 crc kubenswrapper[4956]: I1211 22:08:04.535816 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-9xv55_0545a046-63cb-45bb-b8e8-01e4d8526113/swift-ring-rebalance/0.log" Dec 11 22:08:04 crc kubenswrapper[4956]: I1211 22:08:04.884513 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" event={"ID":"5e222ed6-506d-4466-85c0-6e6354f42d68","Type":"ContainerStarted","Data":"63233aa4201adb722c58bd7a50251777650229c55307e7960a5760f113ef265d"} Dec 11 22:08:04 crc kubenswrapper[4956]: I1211 22:08:04.884698 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:08:04 crc kubenswrapper[4956]: I1211 22:08:04.884752 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:08:04 crc kubenswrapper[4956]: I1211 22:08:04.923190 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" podStartSLOduration=18.923172408 
podStartE2EDuration="18.923172408s" podCreationTimestamp="2025-12-11 22:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 22:08:04.917391501 +0000 UTC m=+1177.361769681" watchObservedRunningTime="2025-12-11 22:08:04.923172408 +0000 UTC m=+1177.367550558" Dec 11 22:08:06 crc kubenswrapper[4956]: I1211 22:08:06.151341 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-9xv55_0545a046-63cb-45bb-b8e8-01e4d8526113/swift-ring-rebalance/0.log" Dec 11 22:08:06 crc kubenswrapper[4956]: I1211 22:08:06.912588 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerStarted","Data":"6ec9035471b611e18061f20ad241f8fc6ece2de36cc35130c16f910a95863a8b"} Dec 11 22:08:06 crc kubenswrapper[4956]: I1211 22:08:06.912970 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerStarted","Data":"eb593dfd2e409aa5f57fb4dd556d6d93c289515777083c34eeb0e7bdf499f344"} Dec 11 22:08:06 crc kubenswrapper[4956]: I1211 22:08:06.912984 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerStarted","Data":"df3404b3569b427c5eff58b9f7d038cdf6fb11ae6f0a0354045aa6803cd80793"} Dec 11 22:08:07 crc kubenswrapper[4956]: I1211 22:08:07.744639 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-9xv55_0545a046-63cb-45bb-b8e8-01e4d8526113/swift-ring-rebalance/0.log" Dec 11 22:08:07 crc kubenswrapper[4956]: I1211 22:08:07.934610 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerStarted","Data":"7f816b3f9aa72639f2ad16ba973317a588bdc1f18f52e6ddd4912403e426b8aa"} Dec 11 22:08:08 crc kubenswrapper[4956]: I1211 22:08:08.946857 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerStarted","Data":"084213c369816602ce36728603116f761776815ba0c4b828ba0e9094d25a9eda"} Dec 11 22:08:08 crc kubenswrapper[4956]: I1211 22:08:08.947145 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerStarted","Data":"c21ec3f9ec5961890abc669a7c0714d8c107d1beca6635731859ef921ba478c2"} Dec 11 22:08:08 crc kubenswrapper[4956]: I1211 22:08:08.947156 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerStarted","Data":"335652464f7bf5e1650d8ef6b0d1b7a9bccddd7f71328a98d1ef1d8dc46a9237"} Dec 11 22:08:09 crc kubenswrapper[4956]: I1211 22:08:09.388240 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-9xv55_0545a046-63cb-45bb-b8e8-01e4d8526113/swift-ring-rebalance/0.log" Dec 11 22:08:09 crc kubenswrapper[4956]: I1211 22:08:09.957520 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerStarted","Data":"a561c4c3234210c526decac52c4967b36f018a2a4d281f447274298666b8bb3a"} Dec 
11 22:08:09 crc kubenswrapper[4956]: I1211 22:08:09.957565 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerStarted","Data":"e39c8f5bdc73de90a8bf47fa38aa96752c5f9023258cdf6fd726f578f66114c7"} Dec 11 22:08:09 crc kubenswrapper[4956]: I1211 22:08:09.957580 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerStarted","Data":"8d9db6ba9c013b3aceeb0d432f3f1108b671d9f936b333ee509d9287459701e2"} Dec 11 22:08:10 crc kubenswrapper[4956]: I1211 22:08:10.929428 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-9xv55_0545a046-63cb-45bb-b8e8-01e4d8526113/swift-ring-rebalance/0.log" Dec 11 22:08:10 crc kubenswrapper[4956]: I1211 22:08:10.970433 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerStarted","Data":"eb0c06a1ac084268e08f264062ea798166f63ba8292f6943eeea7d7e186694fe"} Dec 11 22:08:11 crc kubenswrapper[4956]: I1211 22:08:11.000680 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-storage-0" podStartSLOduration=21.270871318 podStartE2EDuration="28.00066273s" podCreationTimestamp="2025-12-11 22:07:43 +0000 UTC" firstStartedPulling="2025-12-11 22:08:01.243541871 +0000 UTC m=+1173.687920021" lastFinishedPulling="2025-12-11 22:08:07.973333283 +0000 UTC m=+1180.417711433" observedRunningTime="2025-12-11 22:08:10.998347157 +0000 UTC m=+1183.442725307" watchObservedRunningTime="2025-12-11 22:08:11.00066273 +0000 UTC m=+1183.445040880" Dec 11 22:08:12 crc kubenswrapper[4956]: I1211 22:08:12.455403 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:08:12 crc kubenswrapper[4956]: I1211 22:08:12.455935 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:08:12 crc kubenswrapper[4956]: I1211 22:08:12.500125 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-9xv55_0545a046-63cb-45bb-b8e8-01e4d8526113/swift-ring-rebalance/0.log" Dec 11 22:08:14 crc kubenswrapper[4956]: I1211 22:08:14.045616 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-9xv55_0545a046-63cb-45bb-b8e8-01e4d8526113/swift-ring-rebalance/0.log" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.440708 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-storage-2"] Dec 11 22:08:15 crc kubenswrapper[4956]: E1211 22:08:15.441038 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0545a046-63cb-45bb-b8e8-01e4d8526113" containerName="swift-ring-rebalance" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.441052 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="0545a046-63cb-45bb-b8e8-01e4d8526113" containerName="swift-ring-rebalance" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.441169 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="0545a046-63cb-45bb-b8e8-01e4d8526113" containerName="swift-ring-rebalance" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.446521 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.467033 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-storage-1"] Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.472713 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.474715 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-2"] Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.486515 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-1"] Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.521243 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnskr\" (UniqueName: \"kubernetes.io/projected/6e5ed4e3-d63b-4cd8-b886-483ca0abc75e-kube-api-access-hnskr\") pod \"swift-storage-2\" (UID: \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.521330 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6e5ed4e3-d63b-4cd8-b886-483ca0abc75e-cache\") pod \"swift-storage-2\" (UID: \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.521369 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-2\" (UID: \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.521393 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6e5ed4e3-d63b-4cd8-b886-483ca0abc75e-etc-swift\") pod \"swift-storage-2\" (UID: \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.521420 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6e5ed4e3-d63b-4cd8-b886-483ca0abc75e-lock\") pod \"swift-storage-2\" (UID: \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.623127 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pht4x\" (UniqueName: \"kubernetes.io/projected/41ffa609-e4fd-42af-b5bb-eeda0fca28be-kube-api-access-pht4x\") pod \"swift-storage-1\" (UID: \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.623219 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6e5ed4e3-d63b-4cd8-b886-483ca0abc75e-cache\") pod \"swift-storage-2\" (UID: \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.623266 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: 
\"kubernetes.io/empty-dir/41ffa609-e4fd-42af-b5bb-eeda0fca28be-lock\") pod \"swift-storage-1\" (UID: \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.623286 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-1\" (UID: \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.623307 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-2\" (UID: \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.623328 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6e5ed4e3-d63b-4cd8-b886-483ca0abc75e-etc-swift\") pod \"swift-storage-2\" (UID: \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.623345 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/41ffa609-e4fd-42af-b5bb-eeda0fca28be-cache\") pod \"swift-storage-1\" (UID: \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.623374 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6e5ed4e3-d63b-4cd8-b886-483ca0abc75e-lock\") pod \"swift-storage-2\" (UID: \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.623390 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/41ffa609-e4fd-42af-b5bb-eeda0fca28be-etc-swift\") pod \"swift-storage-1\" (UID: \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.623409 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnskr\" (UniqueName: \"kubernetes.io/projected/6e5ed4e3-d63b-4cd8-b886-483ca0abc75e-kube-api-access-hnskr\") pod \"swift-storage-2\" (UID: \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.624165 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6e5ed4e3-d63b-4cd8-b886-483ca0abc75e-cache\") pod \"swift-storage-2\" (UID: \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.624577 4956 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-2\" (UID: \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\") device mount path \"/mnt/openstack/pv06\"" pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.637635 4956 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6e5ed4e3-d63b-4cd8-b886-483ca0abc75e-lock\") pod \"swift-storage-2\" (UID: \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.642478 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6e5ed4e3-d63b-4cd8-b886-483ca0abc75e-etc-swift\") pod \"swift-storage-2\" (UID: \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.652473 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnskr\" (UniqueName: \"kubernetes.io/projected/6e5ed4e3-d63b-4cd8-b886-483ca0abc75e-kube-api-access-hnskr\") pod \"swift-storage-2\" (UID: \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.654957 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-2\" (UID: \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.724719 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/41ffa609-e4fd-42af-b5bb-eeda0fca28be-lock\") pod \"swift-storage-1\" (UID: \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.724781 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-1\" (UID: \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.724809 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/41ffa609-e4fd-42af-b5bb-eeda0fca28be-cache\") pod \"swift-storage-1\" (UID: \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.724837 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/41ffa609-e4fd-42af-b5bb-eeda0fca28be-etc-swift\") pod \"swift-storage-1\" (UID: \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.724877 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pht4x\" (UniqueName: \"kubernetes.io/projected/41ffa609-e4fd-42af-b5bb-eeda0fca28be-kube-api-access-pht4x\") pod \"swift-storage-1\" (UID: \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.725633 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/41ffa609-e4fd-42af-b5bb-eeda0fca28be-lock\") pod \"swift-storage-1\" (UID: \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.725730 4956 
operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-1\" (UID: \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\") device mount path \"/mnt/openstack/pv01\"" pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.726328 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/41ffa609-e4fd-42af-b5bb-eeda0fca28be-cache\") pod \"swift-storage-1\" (UID: \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.735448 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/41ffa609-e4fd-42af-b5bb-eeda0fca28be-etc-swift\") pod \"swift-storage-1\" (UID: \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.746003 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pht4x\" (UniqueName: \"kubernetes.io/projected/41ffa609-e4fd-42af-b5bb-eeda0fca28be-kube-api-access-pht4x\") pod \"swift-storage-1\" (UID: \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.749661 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-1\" (UID: \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.766927 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.815919 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.817680 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-9xv55"] Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.829141 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-9xv55"] Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.841051 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-cpfjp"] Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.842174 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.847738 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.848253 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.855803 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-cpfjp"] Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.928683 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cb4116c5-e52a-4285-a4c9-6731af2a4614-scripts\") pod \"swift-ring-rebalance-cpfjp\" (UID: \"cb4116c5-e52a-4285-a4c9-6731af2a4614\") " pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.929200 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/cb4116c5-e52a-4285-a4c9-6731af2a4614-swiftconf\") pod \"swift-ring-rebalance-cpfjp\" (UID: \"cb4116c5-e52a-4285-a4c9-6731af2a4614\") " pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.929271 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/cb4116c5-e52a-4285-a4c9-6731af2a4614-ring-data-devices\") pod \"swift-ring-rebalance-cpfjp\" (UID: \"cb4116c5-e52a-4285-a4c9-6731af2a4614\") " pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.929307 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q96vd\" (UniqueName: \"kubernetes.io/projected/cb4116c5-e52a-4285-a4c9-6731af2a4614-kube-api-access-q96vd\") pod \"swift-ring-rebalance-cpfjp\" (UID: \"cb4116c5-e52a-4285-a4c9-6731af2a4614\") " pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.929372 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/cb4116c5-e52a-4285-a4c9-6731af2a4614-dispersionconf\") pod \"swift-ring-rebalance-cpfjp\" (UID: \"cb4116c5-e52a-4285-a4c9-6731af2a4614\") " pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" Dec 11 22:08:15 crc kubenswrapper[4956]: I1211 22:08:15.929426 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/cb4116c5-e52a-4285-a4c9-6731af2a4614-etc-swift\") pod \"swift-ring-rebalance-cpfjp\" (UID: \"cb4116c5-e52a-4285-a4c9-6731af2a4614\") " pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" Dec 11 22:08:16 crc kubenswrapper[4956]: I1211 22:08:16.030682 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/cb4116c5-e52a-4285-a4c9-6731af2a4614-dispersionconf\") pod \"swift-ring-rebalance-cpfjp\" (UID: \"cb4116c5-e52a-4285-a4c9-6731af2a4614\") " pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" Dec 11 22:08:16 crc kubenswrapper[4956]: I1211 22:08:16.030756 4956 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/cb4116c5-e52a-4285-a4c9-6731af2a4614-etc-swift\") pod \"swift-ring-rebalance-cpfjp\" (UID: \"cb4116c5-e52a-4285-a4c9-6731af2a4614\") " pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" Dec 11 22:08:16 crc kubenswrapper[4956]: I1211 22:08:16.030841 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cb4116c5-e52a-4285-a4c9-6731af2a4614-scripts\") pod \"swift-ring-rebalance-cpfjp\" (UID: \"cb4116c5-e52a-4285-a4c9-6731af2a4614\") " pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" Dec 11 22:08:16 crc kubenswrapper[4956]: I1211 22:08:16.030965 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/cb4116c5-e52a-4285-a4c9-6731af2a4614-swiftconf\") pod \"swift-ring-rebalance-cpfjp\" (UID: \"cb4116c5-e52a-4285-a4c9-6731af2a4614\") " pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" Dec 11 22:08:16 crc kubenswrapper[4956]: I1211 22:08:16.031010 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/cb4116c5-e52a-4285-a4c9-6731af2a4614-ring-data-devices\") pod \"swift-ring-rebalance-cpfjp\" (UID: \"cb4116c5-e52a-4285-a4c9-6731af2a4614\") " pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" Dec 11 22:08:16 crc kubenswrapper[4956]: I1211 22:08:16.031066 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q96vd\" (UniqueName: \"kubernetes.io/projected/cb4116c5-e52a-4285-a4c9-6731af2a4614-kube-api-access-q96vd\") pod \"swift-ring-rebalance-cpfjp\" (UID: \"cb4116c5-e52a-4285-a4c9-6731af2a4614\") " pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" Dec 11 22:08:16 crc kubenswrapper[4956]: I1211 22:08:16.032202 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cb4116c5-e52a-4285-a4c9-6731af2a4614-scripts\") pod \"swift-ring-rebalance-cpfjp\" (UID: \"cb4116c5-e52a-4285-a4c9-6731af2a4614\") " pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" Dec 11 22:08:16 crc kubenswrapper[4956]: I1211 22:08:16.032532 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/cb4116c5-e52a-4285-a4c9-6731af2a4614-etc-swift\") pod \"swift-ring-rebalance-cpfjp\" (UID: \"cb4116c5-e52a-4285-a4c9-6731af2a4614\") " pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" Dec 11 22:08:16 crc kubenswrapper[4956]: I1211 22:08:16.030757 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0545a046-63cb-45bb-b8e8-01e4d8526113" path="/var/lib/kubelet/pods/0545a046-63cb-45bb-b8e8-01e4d8526113/volumes" Dec 11 22:08:16 crc kubenswrapper[4956]: I1211 22:08:16.033257 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/cb4116c5-e52a-4285-a4c9-6731af2a4614-ring-data-devices\") pod \"swift-ring-rebalance-cpfjp\" (UID: \"cb4116c5-e52a-4285-a4c9-6731af2a4614\") " pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" Dec 11 22:08:16 crc kubenswrapper[4956]: I1211 22:08:16.036437 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/cb4116c5-e52a-4285-a4c9-6731af2a4614-dispersionconf\") pod \"swift-ring-rebalance-cpfjp\" (UID: 
\"cb4116c5-e52a-4285-a4c9-6731af2a4614\") " pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" Dec 11 22:08:16 crc kubenswrapper[4956]: I1211 22:08:16.042205 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/cb4116c5-e52a-4285-a4c9-6731af2a4614-swiftconf\") pod \"swift-ring-rebalance-cpfjp\" (UID: \"cb4116c5-e52a-4285-a4c9-6731af2a4614\") " pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" Dec 11 22:08:16 crc kubenswrapper[4956]: I1211 22:08:16.055242 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q96vd\" (UniqueName: \"kubernetes.io/projected/cb4116c5-e52a-4285-a4c9-6731af2a4614-kube-api-access-q96vd\") pod \"swift-ring-rebalance-cpfjp\" (UID: \"cb4116c5-e52a-4285-a4c9-6731af2a4614\") " pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" Dec 11 22:08:16 crc kubenswrapper[4956]: I1211 22:08:16.117561 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-2"] Dec 11 22:08:16 crc kubenswrapper[4956]: I1211 22:08:16.164551 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" Dec 11 22:08:16 crc kubenswrapper[4956]: I1211 22:08:16.387125 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-1"] Dec 11 22:08:16 crc kubenswrapper[4956]: W1211 22:08:16.397685 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod41ffa609_e4fd_42af_b5bb_eeda0fca28be.slice/crio-1117a2351db8427981846289248f133b5a336a8ded17a5d06b6800d209b92f7c WatchSource:0}: Error finding container 1117a2351db8427981846289248f133b5a336a8ded17a5d06b6800d209b92f7c: Status 404 returned error can't find the container with id 1117a2351db8427981846289248f133b5a336a8ded17a5d06b6800d209b92f7c Dec 11 22:08:16 crc kubenswrapper[4956]: I1211 22:08:16.398518 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-cpfjp"] Dec 11 22:08:16 crc kubenswrapper[4956]: W1211 22:08:16.407705 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcb4116c5_e52a_4285_a4c9_6731af2a4614.slice/crio-32fa66816d5a7b02c5f58c02ddc34ef59016c5781b5fc6a8d9d7eb922ca74ae0 WatchSource:0}: Error finding container 32fa66816d5a7b02c5f58c02ddc34ef59016c5781b5fc6a8d9d7eb922ca74ae0: Status 404 returned error can't find the container with id 32fa66816d5a7b02c5f58c02ddc34ef59016c5781b5fc6a8d9d7eb922ca74ae0 Dec 11 22:08:17 crc kubenswrapper[4956]: I1211 22:08:17.048410 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerStarted","Data":"2435489d05e6fe368d33194469d365927b43d7cf85f6a3d60022925f9a4f27bb"} Dec 11 22:08:17 crc kubenswrapper[4956]: I1211 22:08:17.048757 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerStarted","Data":"f3c99d9956bfc929e57948603b9d53a18d43469dfe98a68080ac495d23746def"} Dec 11 22:08:17 crc kubenswrapper[4956]: I1211 22:08:17.048795 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" 
event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerStarted","Data":"f9670200134c1286eaf9310939478d01fc8211a2de256c795f9999d5b3847c1b"} Dec 11 22:08:17 crc kubenswrapper[4956]: I1211 22:08:17.048808 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerStarted","Data":"41198b5fff9883f116ceb0d78258610dee7a5a0a3d42b18691063079e7b12741"} Dec 11 22:08:17 crc kubenswrapper[4956]: I1211 22:08:17.057577 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" event={"ID":"cb4116c5-e52a-4285-a4c9-6731af2a4614","Type":"ContainerStarted","Data":"dffb601064de95f9027bf6a1fe306997f65ac96acff81e28575f1d442ba6fa03"} Dec 11 22:08:17 crc kubenswrapper[4956]: I1211 22:08:17.057663 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" event={"ID":"cb4116c5-e52a-4285-a4c9-6731af2a4614","Type":"ContainerStarted","Data":"32fa66816d5a7b02c5f58c02ddc34ef59016c5781b5fc6a8d9d7eb922ca74ae0"} Dec 11 22:08:17 crc kubenswrapper[4956]: I1211 22:08:17.065013 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerStarted","Data":"f4b63f70057b4692f3d70ec7b615692707eb7d542fd26618e90be529876ee66b"} Dec 11 22:08:17 crc kubenswrapper[4956]: I1211 22:08:17.065106 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerStarted","Data":"1e6d56fd50a4321561ddb2408995bd963ef98e87ccf27adaf9ab6757e7eef72b"} Dec 11 22:08:17 crc kubenswrapper[4956]: I1211 22:08:17.065140 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerStarted","Data":"1117a2351db8427981846289248f133b5a336a8ded17a5d06b6800d209b92f7c"} Dec 11 22:08:17 crc kubenswrapper[4956]: I1211 22:08:17.079796 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" podStartSLOduration=2.079764167 podStartE2EDuration="2.079764167s" podCreationTimestamp="2025-12-11 22:08:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 22:08:17.078838622 +0000 UTC m=+1189.523216782" watchObservedRunningTime="2025-12-11 22:08:17.079764167 +0000 UTC m=+1189.524142307" Dec 11 22:08:18 crc kubenswrapper[4956]: I1211 22:08:18.107591 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerStarted","Data":"9f04271319e47c3c4d91a1f09a641162e56b39afe178688bc0a1c70ba7f240e0"} Dec 11 22:08:18 crc kubenswrapper[4956]: I1211 22:08:18.107879 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerStarted","Data":"72447e0c769b939455f456831547067dc18164cedbeb060390ebcf30009c56f2"} Dec 11 22:08:18 crc kubenswrapper[4956]: I1211 22:08:18.107890 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerStarted","Data":"0309652fc5f357df3b54b72b8a66e1c9d2c233b4ee1a56a15051304572a14de3"} Dec 11 
22:08:18 crc kubenswrapper[4956]: I1211 22:08:18.107899 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerStarted","Data":"6bbb8038c04c11e53b08f438dcff124a7afe25a3f3009090b7c8030a0a927300"} Dec 11 22:08:18 crc kubenswrapper[4956]: I1211 22:08:18.107908 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerStarted","Data":"01afedf1ad2cfb2fe5ede90de33b275c23506950b6517d31da319c73ce03de60"} Dec 11 22:08:18 crc kubenswrapper[4956]: I1211 22:08:18.121347 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerStarted","Data":"f63154460a3110e5255b3ef2fe24ccdd055d84f3bad6882295d3d709f130f286"} Dec 11 22:08:18 crc kubenswrapper[4956]: I1211 22:08:18.121387 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerStarted","Data":"26c1c73dcf0c5b39c4650886e10e51e3546fdbba8f61bbc344044f4d8589fa89"} Dec 11 22:08:18 crc kubenswrapper[4956]: I1211 22:08:18.121396 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerStarted","Data":"abc834d6a7065ba9aba75a616670e471fcad6c34902e868c09cd27a3200f0f50"} Dec 11 22:08:18 crc kubenswrapper[4956]: I1211 22:08:18.121405 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerStarted","Data":"5585fc1523add48f6aaa6482f0fd078a810c938185772132e926ad6267cd67b5"} Dec 11 22:08:18 crc kubenswrapper[4956]: I1211 22:08:18.121413 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerStarted","Data":"f55235b10356a944991fa8ae283b28fde5f8ad0369f476676bb3ffa23c942fe1"} Dec 11 22:08:19 crc kubenswrapper[4956]: I1211 22:08:19.237953 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerStarted","Data":"c65bc5ab07d0b1c95e58bc177c8e8940a7634af61a15b39da1832947f78faf92"} Dec 11 22:08:19 crc kubenswrapper[4956]: I1211 22:08:19.238300 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerStarted","Data":"e80cbfd4c359ece72706a0bb70f950fdbd439cb876c0d775d64a392a1e2eba70"} Dec 11 22:08:19 crc kubenswrapper[4956]: I1211 22:08:19.238314 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerStarted","Data":"771b4bea890b0dbb9c0e838f2a4041968f361f01433fbe789b4c65fc0d26ce48"} Dec 11 22:08:19 crc kubenswrapper[4956]: I1211 22:08:19.238325 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerStarted","Data":"6cd81b71ea21b926b16de87485bb007c2d3967bc12fcbabd00d34e3ee3bde59c"} Dec 11 22:08:19 crc kubenswrapper[4956]: I1211 22:08:19.314045 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerStarted","Data":"3274a782fa544ad8c1816d0d0440d068cddba402e63527e803cda3b1edb031b0"} Dec 11 22:08:19 crc kubenswrapper[4956]: I1211 22:08:19.314088 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerStarted","Data":"85861c8a818f17c968073a0311f6ea4844080a4856017d4b623cc27d3d844bf8"} Dec 11 22:08:19 crc kubenswrapper[4956]: I1211 22:08:19.314101 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerStarted","Data":"11d07ad78fd327b49162b3c6e2a87af5fe34aed448f5005225d89ab2a00714ed"} Dec 11 22:08:19 crc kubenswrapper[4956]: I1211 22:08:19.314112 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerStarted","Data":"9d4aed0b7b74b22c6bddf686ce2036cb23fae0e337eb638f319bab7788fae09f"} Dec 11 22:08:20 crc kubenswrapper[4956]: I1211 22:08:20.328579 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerStarted","Data":"2afe808db81f1f547d5e43a75174d6726075cec8eba7c43fbac29d747036d0d2"} Dec 11 22:08:20 crc kubenswrapper[4956]: I1211 22:08:20.328633 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerStarted","Data":"ba04ba1a1aae9d4fdcef5abebb4e49f3250a3b585e7e8d5dc5228a86c1bc9f9a"} Dec 11 22:08:20 crc kubenswrapper[4956]: I1211 22:08:20.339799 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerStarted","Data":"f1d46d139c19bce7d4f122fa6e17d51f3b7304f2d735d5649d973e4333e10d74"} Dec 11 22:08:20 crc kubenswrapper[4956]: I1211 22:08:20.339843 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerStarted","Data":"fba3b880188be2c5242c9e67acd79ea380838385f3cd14d4847817fbf375c7a7"} Dec 11 22:08:20 crc kubenswrapper[4956]: I1211 22:08:20.339852 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerStarted","Data":"03f3cc627480f8b066b74efbbd184ebcd415769b99dd460d8995c199560c97d5"} Dec 11 22:08:21 crc kubenswrapper[4956]: I1211 22:08:21.361485 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerStarted","Data":"d11b27f83a4a6e373a896c9ace325eefde941f0504041268ed4a1e33ce528ca2"} Dec 11 22:08:21 crc kubenswrapper[4956]: I1211 22:08:21.361810 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerStarted","Data":"8bede34505de1118ea6c6f2138158e39b74f4de1e0a373cd8163df49da3467e5"} Dec 11 22:08:21 crc kubenswrapper[4956]: I1211 22:08:21.391007 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-storage-1" podStartSLOduration=7.390988413 podStartE2EDuration="7.390988413s" 
podCreationTimestamp="2025-12-11 22:08:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 22:08:21.390298415 +0000 UTC m=+1193.834676585" watchObservedRunningTime="2025-12-11 22:08:21.390988413 +0000 UTC m=+1193.835366573" Dec 11 22:08:21 crc kubenswrapper[4956]: I1211 22:08:21.431641 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-storage-2" podStartSLOduration=7.431616764 podStartE2EDuration="7.431616764s" podCreationTimestamp="2025-12-11 22:08:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 22:08:21.424631715 +0000 UTC m=+1193.869009885" watchObservedRunningTime="2025-12-11 22:08:21.431616764 +0000 UTC m=+1193.875994914" Dec 11 22:08:28 crc kubenswrapper[4956]: I1211 22:08:28.514723 4956 generic.go:334] "Generic (PLEG): container finished" podID="cb4116c5-e52a-4285-a4c9-6731af2a4614" containerID="dffb601064de95f9027bf6a1fe306997f65ac96acff81e28575f1d442ba6fa03" exitCode=0 Dec 11 22:08:28 crc kubenswrapper[4956]: I1211 22:08:28.515311 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" event={"ID":"cb4116c5-e52a-4285-a4c9-6731af2a4614","Type":"ContainerDied","Data":"dffb601064de95f9027bf6a1fe306997f65ac96acff81e28575f1d442ba6fa03"} Dec 11 22:08:29 crc kubenswrapper[4956]: I1211 22:08:29.794605 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" Dec 11 22:08:29 crc kubenswrapper[4956]: I1211 22:08:29.841237 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q96vd\" (UniqueName: \"kubernetes.io/projected/cb4116c5-e52a-4285-a4c9-6731af2a4614-kube-api-access-q96vd\") pod \"cb4116c5-e52a-4285-a4c9-6731af2a4614\" (UID: \"cb4116c5-e52a-4285-a4c9-6731af2a4614\") " Dec 11 22:08:29 crc kubenswrapper[4956]: I1211 22:08:29.841331 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cb4116c5-e52a-4285-a4c9-6731af2a4614-scripts\") pod \"cb4116c5-e52a-4285-a4c9-6731af2a4614\" (UID: \"cb4116c5-e52a-4285-a4c9-6731af2a4614\") " Dec 11 22:08:29 crc kubenswrapper[4956]: I1211 22:08:29.841370 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/cb4116c5-e52a-4285-a4c9-6731af2a4614-swiftconf\") pod \"cb4116c5-e52a-4285-a4c9-6731af2a4614\" (UID: \"cb4116c5-e52a-4285-a4c9-6731af2a4614\") " Dec 11 22:08:29 crc kubenswrapper[4956]: I1211 22:08:29.841412 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/cb4116c5-e52a-4285-a4c9-6731af2a4614-ring-data-devices\") pod \"cb4116c5-e52a-4285-a4c9-6731af2a4614\" (UID: \"cb4116c5-e52a-4285-a4c9-6731af2a4614\") " Dec 11 22:08:29 crc kubenswrapper[4956]: I1211 22:08:29.841458 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/cb4116c5-e52a-4285-a4c9-6731af2a4614-etc-swift\") pod \"cb4116c5-e52a-4285-a4c9-6731af2a4614\" (UID: \"cb4116c5-e52a-4285-a4c9-6731af2a4614\") " Dec 11 22:08:29 crc kubenswrapper[4956]: I1211 22:08:29.841491 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/cb4116c5-e52a-4285-a4c9-6731af2a4614-dispersionconf\") pod \"cb4116c5-e52a-4285-a4c9-6731af2a4614\" (UID: \"cb4116c5-e52a-4285-a4c9-6731af2a4614\") " Dec 11 22:08:29 crc kubenswrapper[4956]: I1211 22:08:29.842572 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb4116c5-e52a-4285-a4c9-6731af2a4614-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "cb4116c5-e52a-4285-a4c9-6731af2a4614" (UID: "cb4116c5-e52a-4285-a4c9-6731af2a4614"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 22:08:29 crc kubenswrapper[4956]: I1211 22:08:29.842685 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb4116c5-e52a-4285-a4c9-6731af2a4614-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "cb4116c5-e52a-4285-a4c9-6731af2a4614" (UID: "cb4116c5-e52a-4285-a4c9-6731af2a4614"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:08:29 crc kubenswrapper[4956]: I1211 22:08:29.846714 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb4116c5-e52a-4285-a4c9-6731af2a4614-kube-api-access-q96vd" (OuterVolumeSpecName: "kube-api-access-q96vd") pod "cb4116c5-e52a-4285-a4c9-6731af2a4614" (UID: "cb4116c5-e52a-4285-a4c9-6731af2a4614"). InnerVolumeSpecName "kube-api-access-q96vd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:08:29 crc kubenswrapper[4956]: I1211 22:08:29.860611 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb4116c5-e52a-4285-a4c9-6731af2a4614-scripts" (OuterVolumeSpecName: "scripts") pod "cb4116c5-e52a-4285-a4c9-6731af2a4614" (UID: "cb4116c5-e52a-4285-a4c9-6731af2a4614"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 22:08:29 crc kubenswrapper[4956]: I1211 22:08:29.864516 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb4116c5-e52a-4285-a4c9-6731af2a4614-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "cb4116c5-e52a-4285-a4c9-6731af2a4614" (UID: "cb4116c5-e52a-4285-a4c9-6731af2a4614"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 22:08:29 crc kubenswrapper[4956]: I1211 22:08:29.871859 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb4116c5-e52a-4285-a4c9-6731af2a4614-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "cb4116c5-e52a-4285-a4c9-6731af2a4614" (UID: "cb4116c5-e52a-4285-a4c9-6731af2a4614"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 22:08:29 crc kubenswrapper[4956]: I1211 22:08:29.943696 4956 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/cb4116c5-e52a-4285-a4c9-6731af2a4614-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:29 crc kubenswrapper[4956]: I1211 22:08:29.943732 4956 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/cb4116c5-e52a-4285-a4c9-6731af2a4614-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:29 crc kubenswrapper[4956]: I1211 22:08:29.943751 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q96vd\" (UniqueName: \"kubernetes.io/projected/cb4116c5-e52a-4285-a4c9-6731af2a4614-kube-api-access-q96vd\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:29 crc kubenswrapper[4956]: I1211 22:08:29.943787 4956 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cb4116c5-e52a-4285-a4c9-6731af2a4614-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:29 crc kubenswrapper[4956]: I1211 22:08:29.943799 4956 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/cb4116c5-e52a-4285-a4c9-6731af2a4614-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:29 crc kubenswrapper[4956]: I1211 22:08:29.943811 4956 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/cb4116c5-e52a-4285-a4c9-6731af2a4614-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.528971 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" event={"ID":"cb4116c5-e52a-4285-a4c9-6731af2a4614","Type":"ContainerDied","Data":"32fa66816d5a7b02c5f58c02ddc34ef59016c5781b5fc6a8d9d7eb922ca74ae0"} Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.529018 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="32fa66816d5a7b02c5f58c02ddc34ef59016c5781b5fc6a8d9d7eb922ca74ae0" Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.529027 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-cpfjp" Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.815537 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn"] Dec 11 22:08:30 crc kubenswrapper[4956]: E1211 22:08:30.816119 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb4116c5-e52a-4285-a4c9-6731af2a4614" containerName="swift-ring-rebalance" Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.816131 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb4116c5-e52a-4285-a4c9-6731af2a4614" containerName="swift-ring-rebalance" Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.816264 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb4116c5-e52a-4285-a4c9-6731af2a4614" containerName="swift-ring-rebalance" Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.816745 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn" Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.818592 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data" Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.818677 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts" Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.827012 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn"] Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.856327 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/50ada03f-eece-46e8-b8de-a3aafb231d77-dispersionconf\") pod \"swift-ring-rebalance-debug-hrrfn\" (UID: \"50ada03f-eece-46e8-b8de-a3aafb231d77\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn" Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.856379 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/50ada03f-eece-46e8-b8de-a3aafb231d77-etc-swift\") pod \"swift-ring-rebalance-debug-hrrfn\" (UID: \"50ada03f-eece-46e8-b8de-a3aafb231d77\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn" Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.856413 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/50ada03f-eece-46e8-b8de-a3aafb231d77-swiftconf\") pod \"swift-ring-rebalance-debug-hrrfn\" (UID: \"50ada03f-eece-46e8-b8de-a3aafb231d77\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn" Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.856456 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csb82\" (UniqueName: \"kubernetes.io/projected/50ada03f-eece-46e8-b8de-a3aafb231d77-kube-api-access-csb82\") pod \"swift-ring-rebalance-debug-hrrfn\" (UID: \"50ada03f-eece-46e8-b8de-a3aafb231d77\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn" Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.856496 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/50ada03f-eece-46e8-b8de-a3aafb231d77-ring-data-devices\") pod \"swift-ring-rebalance-debug-hrrfn\" (UID: \"50ada03f-eece-46e8-b8de-a3aafb231d77\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn" Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.856534 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/50ada03f-eece-46e8-b8de-a3aafb231d77-scripts\") pod \"swift-ring-rebalance-debug-hrrfn\" (UID: \"50ada03f-eece-46e8-b8de-a3aafb231d77\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn" Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.957445 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csb82\" (UniqueName: \"kubernetes.io/projected/50ada03f-eece-46e8-b8de-a3aafb231d77-kube-api-access-csb82\") pod \"swift-ring-rebalance-debug-hrrfn\" (UID: \"50ada03f-eece-46e8-b8de-a3aafb231d77\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn" 
Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.957521 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/50ada03f-eece-46e8-b8de-a3aafb231d77-ring-data-devices\") pod \"swift-ring-rebalance-debug-hrrfn\" (UID: \"50ada03f-eece-46e8-b8de-a3aafb231d77\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn" Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.957554 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/50ada03f-eece-46e8-b8de-a3aafb231d77-scripts\") pod \"swift-ring-rebalance-debug-hrrfn\" (UID: \"50ada03f-eece-46e8-b8de-a3aafb231d77\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn" Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.957627 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/50ada03f-eece-46e8-b8de-a3aafb231d77-dispersionconf\") pod \"swift-ring-rebalance-debug-hrrfn\" (UID: \"50ada03f-eece-46e8-b8de-a3aafb231d77\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn" Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.957651 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/50ada03f-eece-46e8-b8de-a3aafb231d77-etc-swift\") pod \"swift-ring-rebalance-debug-hrrfn\" (UID: \"50ada03f-eece-46e8-b8de-a3aafb231d77\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn" Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.957679 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/50ada03f-eece-46e8-b8de-a3aafb231d77-swiftconf\") pod \"swift-ring-rebalance-debug-hrrfn\" (UID: \"50ada03f-eece-46e8-b8de-a3aafb231d77\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn" Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.958618 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/50ada03f-eece-46e8-b8de-a3aafb231d77-etc-swift\") pod \"swift-ring-rebalance-debug-hrrfn\" (UID: \"50ada03f-eece-46e8-b8de-a3aafb231d77\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn" Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.958652 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/50ada03f-eece-46e8-b8de-a3aafb231d77-scripts\") pod \"swift-ring-rebalance-debug-hrrfn\" (UID: \"50ada03f-eece-46e8-b8de-a3aafb231d77\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn" Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.959076 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/50ada03f-eece-46e8-b8de-a3aafb231d77-ring-data-devices\") pod \"swift-ring-rebalance-debug-hrrfn\" (UID: \"50ada03f-eece-46e8-b8de-a3aafb231d77\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn" Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.962579 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/50ada03f-eece-46e8-b8de-a3aafb231d77-dispersionconf\") pod \"swift-ring-rebalance-debug-hrrfn\" (UID: \"50ada03f-eece-46e8-b8de-a3aafb231d77\") " 
pod="swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn" Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.962791 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/50ada03f-eece-46e8-b8de-a3aafb231d77-swiftconf\") pod \"swift-ring-rebalance-debug-hrrfn\" (UID: \"50ada03f-eece-46e8-b8de-a3aafb231d77\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn" Dec 11 22:08:30 crc kubenswrapper[4956]: I1211 22:08:30.974440 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csb82\" (UniqueName: \"kubernetes.io/projected/50ada03f-eece-46e8-b8de-a3aafb231d77-kube-api-access-csb82\") pod \"swift-ring-rebalance-debug-hrrfn\" (UID: \"50ada03f-eece-46e8-b8de-a3aafb231d77\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn" Dec 11 22:08:31 crc kubenswrapper[4956]: I1211 22:08:31.146994 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn" Dec 11 22:08:31 crc kubenswrapper[4956]: I1211 22:08:31.574253 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn"] Dec 11 22:08:31 crc kubenswrapper[4956]: W1211 22:08:31.581625 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50ada03f_eece_46e8_b8de_a3aafb231d77.slice/crio-ab03d2c09d39faf6abe9b0958d761df430add74936413697b360c7760e519d66 WatchSource:0}: Error finding container ab03d2c09d39faf6abe9b0958d761df430add74936413697b360c7760e519d66: Status 404 returned error can't find the container with id ab03d2c09d39faf6abe9b0958d761df430add74936413697b360c7760e519d66 Dec 11 22:08:32 crc kubenswrapper[4956]: I1211 22:08:32.544203 4956 generic.go:334] "Generic (PLEG): container finished" podID="50ada03f-eece-46e8-b8de-a3aafb231d77" containerID="194cc3818cffce97b39f14bece32908e19b8f09396b7b0c7a512273c0f5eb5b4" exitCode=0 Dec 11 22:08:32 crc kubenswrapper[4956]: I1211 22:08:32.544275 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn" event={"ID":"50ada03f-eece-46e8-b8de-a3aafb231d77","Type":"ContainerDied","Data":"194cc3818cffce97b39f14bece32908e19b8f09396b7b0c7a512273c0f5eb5b4"} Dec 11 22:08:32 crc kubenswrapper[4956]: I1211 22:08:32.545548 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn" event={"ID":"50ada03f-eece-46e8-b8de-a3aafb231d77","Type":"ContainerStarted","Data":"ab03d2c09d39faf6abe9b0958d761df430add74936413697b360c7760e519d66"} Dec 11 22:08:32 crc kubenswrapper[4956]: I1211 22:08:32.580903 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn"] Dec 11 22:08:32 crc kubenswrapper[4956]: I1211 22:08:32.587736 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn"] Dec 11 22:08:33 crc kubenswrapper[4956]: I1211 22:08:33.957395 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.034132 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/50ada03f-eece-46e8-b8de-a3aafb231d77-etc-swift\") pod \"50ada03f-eece-46e8-b8de-a3aafb231d77\" (UID: \"50ada03f-eece-46e8-b8de-a3aafb231d77\") " Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.034570 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/50ada03f-eece-46e8-b8de-a3aafb231d77-dispersionconf\") pod \"50ada03f-eece-46e8-b8de-a3aafb231d77\" (UID: \"50ada03f-eece-46e8-b8de-a3aafb231d77\") " Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.034624 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-csb82\" (UniqueName: \"kubernetes.io/projected/50ada03f-eece-46e8-b8de-a3aafb231d77-kube-api-access-csb82\") pod \"50ada03f-eece-46e8-b8de-a3aafb231d77\" (UID: \"50ada03f-eece-46e8-b8de-a3aafb231d77\") " Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.034728 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/50ada03f-eece-46e8-b8de-a3aafb231d77-scripts\") pod \"50ada03f-eece-46e8-b8de-a3aafb231d77\" (UID: \"50ada03f-eece-46e8-b8de-a3aafb231d77\") " Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.034884 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/50ada03f-eece-46e8-b8de-a3aafb231d77-swiftconf\") pod \"50ada03f-eece-46e8-b8de-a3aafb231d77\" (UID: \"50ada03f-eece-46e8-b8de-a3aafb231d77\") " Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.034963 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/50ada03f-eece-46e8-b8de-a3aafb231d77-ring-data-devices\") pod \"50ada03f-eece-46e8-b8de-a3aafb231d77\" (UID: \"50ada03f-eece-46e8-b8de-a3aafb231d77\") " Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.035509 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50ada03f-eece-46e8-b8de-a3aafb231d77-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "50ada03f-eece-46e8-b8de-a3aafb231d77" (UID: "50ada03f-eece-46e8-b8de-a3aafb231d77"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.035704 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50ada03f-eece-46e8-b8de-a3aafb231d77-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "50ada03f-eece-46e8-b8de-a3aafb231d77" (UID: "50ada03f-eece-46e8-b8de-a3aafb231d77"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.037152 4956 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/50ada03f-eece-46e8-b8de-a3aafb231d77-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.037180 4956 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/50ada03f-eece-46e8-b8de-a3aafb231d77-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.039745 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50ada03f-eece-46e8-b8de-a3aafb231d77-kube-api-access-csb82" (OuterVolumeSpecName: "kube-api-access-csb82") pod "50ada03f-eece-46e8-b8de-a3aafb231d77" (UID: "50ada03f-eece-46e8-b8de-a3aafb231d77"). InnerVolumeSpecName "kube-api-access-csb82". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.057409 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50ada03f-eece-46e8-b8de-a3aafb231d77-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "50ada03f-eece-46e8-b8de-a3aafb231d77" (UID: "50ada03f-eece-46e8-b8de-a3aafb231d77"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.069427 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50ada03f-eece-46e8-b8de-a3aafb231d77-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "50ada03f-eece-46e8-b8de-a3aafb231d77" (UID: "50ada03f-eece-46e8-b8de-a3aafb231d77"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.069883 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50ada03f-eece-46e8-b8de-a3aafb231d77-scripts" (OuterVolumeSpecName: "scripts") pod "50ada03f-eece-46e8-b8de-a3aafb231d77" (UID: "50ada03f-eece-46e8-b8de-a3aafb231d77"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.099732 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-jcchf"] Dec 11 22:08:34 crc kubenswrapper[4956]: E1211 22:08:34.101232 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50ada03f-eece-46e8-b8de-a3aafb231d77" containerName="swift-ring-rebalance" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.101255 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="50ada03f-eece-46e8-b8de-a3aafb231d77" containerName="swift-ring-rebalance" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.101449 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="50ada03f-eece-46e8-b8de-a3aafb231d77" containerName="swift-ring-rebalance" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.102072 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-jcchf" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.107833 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-jcchf"] Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.139508 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4032c9d-3388-41cc-b413-63b2a1610c01-scripts\") pod \"swift-ring-rebalance-debug-jcchf\" (UID: \"f4032c9d-3388-41cc-b413-63b2a1610c01\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jcchf" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.139571 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f4032c9d-3388-41cc-b413-63b2a1610c01-ring-data-devices\") pod \"swift-ring-rebalance-debug-jcchf\" (UID: \"f4032c9d-3388-41cc-b413-63b2a1610c01\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jcchf" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.139615 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f4032c9d-3388-41cc-b413-63b2a1610c01-etc-swift\") pod \"swift-ring-rebalance-debug-jcchf\" (UID: \"f4032c9d-3388-41cc-b413-63b2a1610c01\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jcchf" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.139679 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f4032c9d-3388-41cc-b413-63b2a1610c01-dispersionconf\") pod \"swift-ring-rebalance-debug-jcchf\" (UID: \"f4032c9d-3388-41cc-b413-63b2a1610c01\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jcchf" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.139706 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f4032c9d-3388-41cc-b413-63b2a1610c01-swiftconf\") pod \"swift-ring-rebalance-debug-jcchf\" (UID: \"f4032c9d-3388-41cc-b413-63b2a1610c01\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jcchf" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.139741 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mmfl\" (UniqueName: \"kubernetes.io/projected/f4032c9d-3388-41cc-b413-63b2a1610c01-kube-api-access-8mmfl\") pod \"swift-ring-rebalance-debug-jcchf\" (UID: \"f4032c9d-3388-41cc-b413-63b2a1610c01\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jcchf" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.139860 4956 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/50ada03f-eece-46e8-b8de-a3aafb231d77-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.139876 4956 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/50ada03f-eece-46e8-b8de-a3aafb231d77-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.139888 4956 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/50ada03f-eece-46e8-b8de-a3aafb231d77-dispersionconf\") on node \"crc\" DevicePath 
\"\"" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.139902 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-csb82\" (UniqueName: \"kubernetes.io/projected/50ada03f-eece-46e8-b8de-a3aafb231d77-kube-api-access-csb82\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.241541 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4032c9d-3388-41cc-b413-63b2a1610c01-scripts\") pod \"swift-ring-rebalance-debug-jcchf\" (UID: \"f4032c9d-3388-41cc-b413-63b2a1610c01\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jcchf" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.241598 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f4032c9d-3388-41cc-b413-63b2a1610c01-ring-data-devices\") pod \"swift-ring-rebalance-debug-jcchf\" (UID: \"f4032c9d-3388-41cc-b413-63b2a1610c01\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jcchf" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.241643 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f4032c9d-3388-41cc-b413-63b2a1610c01-etc-swift\") pod \"swift-ring-rebalance-debug-jcchf\" (UID: \"f4032c9d-3388-41cc-b413-63b2a1610c01\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jcchf" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.241694 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f4032c9d-3388-41cc-b413-63b2a1610c01-dispersionconf\") pod \"swift-ring-rebalance-debug-jcchf\" (UID: \"f4032c9d-3388-41cc-b413-63b2a1610c01\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jcchf" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.241716 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f4032c9d-3388-41cc-b413-63b2a1610c01-swiftconf\") pod \"swift-ring-rebalance-debug-jcchf\" (UID: \"f4032c9d-3388-41cc-b413-63b2a1610c01\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jcchf" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.241743 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mmfl\" (UniqueName: \"kubernetes.io/projected/f4032c9d-3388-41cc-b413-63b2a1610c01-kube-api-access-8mmfl\") pod \"swift-ring-rebalance-debug-jcchf\" (UID: \"f4032c9d-3388-41cc-b413-63b2a1610c01\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jcchf" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.242285 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4032c9d-3388-41cc-b413-63b2a1610c01-scripts\") pod \"swift-ring-rebalance-debug-jcchf\" (UID: \"f4032c9d-3388-41cc-b413-63b2a1610c01\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jcchf" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.242732 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f4032c9d-3388-41cc-b413-63b2a1610c01-etc-swift\") pod \"swift-ring-rebalance-debug-jcchf\" (UID: \"f4032c9d-3388-41cc-b413-63b2a1610c01\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jcchf" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.243569 4956 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f4032c9d-3388-41cc-b413-63b2a1610c01-ring-data-devices\") pod \"swift-ring-rebalance-debug-jcchf\" (UID: \"f4032c9d-3388-41cc-b413-63b2a1610c01\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jcchf" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.245509 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f4032c9d-3388-41cc-b413-63b2a1610c01-dispersionconf\") pod \"swift-ring-rebalance-debug-jcchf\" (UID: \"f4032c9d-3388-41cc-b413-63b2a1610c01\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jcchf" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.245558 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f4032c9d-3388-41cc-b413-63b2a1610c01-swiftconf\") pod \"swift-ring-rebalance-debug-jcchf\" (UID: \"f4032c9d-3388-41cc-b413-63b2a1610c01\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jcchf" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.265829 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mmfl\" (UniqueName: \"kubernetes.io/projected/f4032c9d-3388-41cc-b413-63b2a1610c01-kube-api-access-8mmfl\") pod \"swift-ring-rebalance-debug-jcchf\" (UID: \"f4032c9d-3388-41cc-b413-63b2a1610c01\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-jcchf" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.435167 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-jcchf" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.563528 4956 scope.go:117] "RemoveContainer" containerID="194cc3818cffce97b39f14bece32908e19b8f09396b7b0c7a512273c0f5eb5b4" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.563660 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-hrrfn" Dec 11 22:08:34 crc kubenswrapper[4956]: I1211 22:08:34.635430 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-jcchf"] Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.571783 4956 generic.go:334] "Generic (PLEG): container finished" podID="f4032c9d-3388-41cc-b413-63b2a1610c01" containerID="58a758241406edb0e7446bb5428e6341c8b2badaf53162050f9550c0928c7127" exitCode=0 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.571858 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-jcchf" event={"ID":"f4032c9d-3388-41cc-b413-63b2a1610c01","Type":"ContainerDied","Data":"58a758241406edb0e7446bb5428e6341c8b2badaf53162050f9550c0928c7127"} Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.572127 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-jcchf" event={"ID":"f4032c9d-3388-41cc-b413-63b2a1610c01","Type":"ContainerStarted","Data":"0f989f90cd31aad5f90ffac68f5d07707d25df226a2e5b50b5156d53aaeae806"} Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.615619 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-jcchf"] Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.622558 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-jcchf"] Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.714358 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-2"] Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.714834 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="account-server" containerID="cri-o://f9670200134c1286eaf9310939478d01fc8211a2de256c795f9999d5b3847c1b" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.714868 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="rsync" containerID="cri-o://fba3b880188be2c5242c9e67acd79ea380838385f3cd14d4847817fbf375c7a7" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.714967 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="account-auditor" containerID="cri-o://2435489d05e6fe368d33194469d365927b43d7cf85f6a3d60022925f9a4f27bb" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.714895 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="swift-recon-cron" containerID="cri-o://f1d46d139c19bce7d4f122fa6e17d51f3b7304f2d735d5649d973e4333e10d74" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.714926 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="account-reaper" containerID="cri-o://f55235b10356a944991fa8ae283b28fde5f8ad0369f476676bb3ffa23c942fe1" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.714942 4956 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="account-replicator" containerID="cri-o://f3c99d9956bfc929e57948603b9d53a18d43469dfe98a68080ac495d23746def" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.714954 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="container-auditor" containerID="cri-o://26c1c73dcf0c5b39c4650886e10e51e3546fdbba8f61bbc344044f4d8589fa89" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.714961 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="container-replicator" containerID="cri-o://abc834d6a7065ba9aba75a616670e471fcad6c34902e868c09cd27a3200f0f50" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.714987 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="object-replicator" containerID="cri-o://11d07ad78fd327b49162b3c6e2a87af5fe34aed448f5005225d89ab2a00714ed" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.714989 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="object-server" containerID="cri-o://9d4aed0b7b74b22c6bddf686ce2036cb23fae0e337eb638f319bab7788fae09f" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.715001 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="object-updater" containerID="cri-o://3274a782fa544ad8c1816d0d0440d068cddba402e63527e803cda3b1edb031b0" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.714960 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="object-auditor" containerID="cri-o://85861c8a818f17c968073a0311f6ea4844080a4856017d4b623cc27d3d844bf8" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.714905 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="container-server" containerID="cri-o://5585fc1523add48f6aaa6482f0fd078a810c938185772132e926ad6267cd67b5" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.716918 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="container-updater" containerID="cri-o://f63154460a3110e5255b3ef2fe24ccdd055d84f3bad6882295d3d709f130f286" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.715009 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="object-expirer" containerID="cri-o://03f3cc627480f8b066b74efbbd184ebcd415769b99dd460d8995c199560c97d5" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.752453 
4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.753077 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="account-server" containerID="cri-o://3b1b9e20037c7e86183cad618af5b2ebd5dad2173b0e829a17a9fd9ed798ebda" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.753146 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="object-server" containerID="cri-o://335652464f7bf5e1650d8ef6b0d1b7a9bccddd7f71328a98d1ef1d8dc46a9237" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.753236 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="container-updater" containerID="cri-o://7f816b3f9aa72639f2ad16ba973317a588bdc1f18f52e6ddd4912403e426b8aa" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.753288 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="container-auditor" containerID="cri-o://6ec9035471b611e18061f20ad241f8fc6ece2de36cc35130c16f910a95863a8b" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.753333 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="container-replicator" containerID="cri-o://eb593dfd2e409aa5f57fb4dd556d6d93c289515777083c34eeb0e7bdf499f344" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.753378 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="container-server" containerID="cri-o://df3404b3569b427c5eff58b9f7d038cdf6fb11ae6f0a0354045aa6803cd80793" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.753423 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="account-reaper" containerID="cri-o://5fa3011ef640c6b3cb258381deb019de5b934f0bde4e0a8db62c222b53729fa2" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.753466 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="account-auditor" containerID="cri-o://967bb04f5adac6da7467c43c1e7c07d69287cc4f6152349931e8523be8e8f9e7" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.753485 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="swift-recon-cron" containerID="cri-o://eb0c06a1ac084268e08f264062ea798166f63ba8292f6943eeea7d7e186694fe" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.753509 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" 
containerName="account-replicator" containerID="cri-o://f59b79b6d8c86bb83416f175bb80a0cdddbf06ca5094679bf78e941c18750b8c" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.753638 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="rsync" containerID="cri-o://a561c4c3234210c526decac52c4967b36f018a2a4d281f447274298666b8bb3a" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.753653 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="object-updater" containerID="cri-o://8d9db6ba9c013b3aceeb0d432f3f1108b671d9f936b333ee509d9287459701e2" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.753688 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="object-auditor" containerID="cri-o://084213c369816602ce36728603116f761776815ba0c4b828ba0e9094d25a9eda" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.753700 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="object-expirer" containerID="cri-o://e39c8f5bdc73de90a8bf47fa38aa96752c5f9023258cdf6fd726f578f66114c7" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.753722 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="object-replicator" containerID="cri-o://c21ec3f9ec5961890abc669a7c0714d8c107d1beca6635731859ef921ba478c2" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.764456 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-cpfjp"] Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.789874 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-1"] Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.790441 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="account-server" containerID="cri-o://1e6d56fd50a4321561ddb2408995bd963ef98e87ccf27adaf9ab6757e7eef72b" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.790832 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="swift-recon-cron" containerID="cri-o://d11b27f83a4a6e373a896c9ace325eefde941f0504041268ed4a1e33ce528ca2" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.790884 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="rsync" containerID="cri-o://8bede34505de1118ea6c6f2138158e39b74f4de1e0a373cd8163df49da3467e5" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.790919 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" 
containerName="object-expirer" containerID="cri-o://2afe808db81f1f547d5e43a75174d6726075cec8eba7c43fbac29d747036d0d2" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.790975 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="object-updater" containerID="cri-o://ba04ba1a1aae9d4fdcef5abebb4e49f3250a3b585e7e8d5dc5228a86c1bc9f9a" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.791009 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="object-auditor" containerID="cri-o://c65bc5ab07d0b1c95e58bc177c8e8940a7634af61a15b39da1832947f78faf92" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.791043 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="object-replicator" containerID="cri-o://e80cbfd4c359ece72706a0bb70f950fdbd439cb876c0d775d64a392a1e2eba70" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.791079 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="object-server" containerID="cri-o://771b4bea890b0dbb9c0e838f2a4041968f361f01433fbe789b4c65fc0d26ce48" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.791122 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="container-updater" containerID="cri-o://6cd81b71ea21b926b16de87485bb007c2d3967bc12fcbabd00d34e3ee3bde59c" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.791160 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="container-auditor" containerID="cri-o://9f04271319e47c3c4d91a1f09a641162e56b39afe178688bc0a1c70ba7f240e0" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.791195 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="container-replicator" containerID="cri-o://72447e0c769b939455f456831547067dc18164cedbeb060390ebcf30009c56f2" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.791233 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="container-server" containerID="cri-o://0309652fc5f357df3b54b72b8a66e1c9d2c233b4ee1a56a15051304572a14de3" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.791271 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="account-reaper" containerID="cri-o://6bbb8038c04c11e53b08f438dcff124a7afe25a3f3009090b7c8030a0a927300" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.791315 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" 
podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="account-auditor" containerID="cri-o://01afedf1ad2cfb2fe5ede90de33b275c23506950b6517d31da319c73ce03de60" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.791352 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="account-replicator" containerID="cri-o://f4b63f70057b4692f3d70ec7b615692707eb7d542fd26618e90be529876ee66b" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.801204 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-cpfjp"] Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.815947 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj"] Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.816235 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" podUID="5e222ed6-506d-4466-85c0-6e6354f42d68" containerName="proxy-httpd" containerID="cri-o://72c0985dfe46963c43bb2ceb143217bab3a2ca4c734ba40d5698d922f2c963e3" gracePeriod=30 Dec 11 22:08:35 crc kubenswrapper[4956]: I1211 22:08:35.816676 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" podUID="5e222ed6-506d-4466-85c0-6e6354f42d68" containerName="proxy-server" containerID="cri-o://63233aa4201adb722c58bd7a50251777650229c55307e7960a5760f113ef265d" gracePeriod=30 Dec 11 22:08:36 crc kubenswrapper[4956]: I1211 22:08:36.032501 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50ada03f-eece-46e8-b8de-a3aafb231d77" path="/var/lib/kubelet/pods/50ada03f-eece-46e8-b8de-a3aafb231d77/volumes" Dec 11 22:08:36 crc kubenswrapper[4956]: I1211 22:08:36.033642 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb4116c5-e52a-4285-a4c9-6731af2a4614" path="/var/lib/kubelet/pods/cb4116c5-e52a-4285-a4c9-6731af2a4614/volumes" Dec 11 22:08:36 crc kubenswrapper[4956]: I1211 22:08:36.587576 4956 generic.go:334] "Generic (PLEG): container finished" podID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerID="f63154460a3110e5255b3ef2fe24ccdd055d84f3bad6882295d3d709f130f286" exitCode=0 Dec 11 22:08:36 crc kubenswrapper[4956]: I1211 22:08:36.587612 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerDied","Data":"f63154460a3110e5255b3ef2fe24ccdd055d84f3bad6882295d3d709f130f286"} Dec 11 22:08:36 crc kubenswrapper[4956]: I1211 22:08:36.785873 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-jcchf" Dec 11 22:08:36 crc kubenswrapper[4956]: I1211 22:08:36.882433 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f4032c9d-3388-41cc-b413-63b2a1610c01-dispersionconf\") pod \"f4032c9d-3388-41cc-b413-63b2a1610c01\" (UID: \"f4032c9d-3388-41cc-b413-63b2a1610c01\") " Dec 11 22:08:36 crc kubenswrapper[4956]: I1211 22:08:36.882487 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f4032c9d-3388-41cc-b413-63b2a1610c01-ring-data-devices\") pod \"f4032c9d-3388-41cc-b413-63b2a1610c01\" (UID: \"f4032c9d-3388-41cc-b413-63b2a1610c01\") " Dec 11 22:08:36 crc kubenswrapper[4956]: I1211 22:08:36.882613 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4032c9d-3388-41cc-b413-63b2a1610c01-scripts\") pod \"f4032c9d-3388-41cc-b413-63b2a1610c01\" (UID: \"f4032c9d-3388-41cc-b413-63b2a1610c01\") " Dec 11 22:08:36 crc kubenswrapper[4956]: I1211 22:08:36.882633 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f4032c9d-3388-41cc-b413-63b2a1610c01-etc-swift\") pod \"f4032c9d-3388-41cc-b413-63b2a1610c01\" (UID: \"f4032c9d-3388-41cc-b413-63b2a1610c01\") " Dec 11 22:08:36 crc kubenswrapper[4956]: I1211 22:08:36.882650 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f4032c9d-3388-41cc-b413-63b2a1610c01-swiftconf\") pod \"f4032c9d-3388-41cc-b413-63b2a1610c01\" (UID: \"f4032c9d-3388-41cc-b413-63b2a1610c01\") " Dec 11 22:08:36 crc kubenswrapper[4956]: I1211 22:08:36.882678 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8mmfl\" (UniqueName: \"kubernetes.io/projected/f4032c9d-3388-41cc-b413-63b2a1610c01-kube-api-access-8mmfl\") pod \"f4032c9d-3388-41cc-b413-63b2a1610c01\" (UID: \"f4032c9d-3388-41cc-b413-63b2a1610c01\") " Dec 11 22:08:36 crc kubenswrapper[4956]: I1211 22:08:36.883123 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4032c9d-3388-41cc-b413-63b2a1610c01-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "f4032c9d-3388-41cc-b413-63b2a1610c01" (UID: "f4032c9d-3388-41cc-b413-63b2a1610c01"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 22:08:36 crc kubenswrapper[4956]: I1211 22:08:36.885727 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4032c9d-3388-41cc-b413-63b2a1610c01-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "f4032c9d-3388-41cc-b413-63b2a1610c01" (UID: "f4032c9d-3388-41cc-b413-63b2a1610c01"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:08:36 crc kubenswrapper[4956]: I1211 22:08:36.889450 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4032c9d-3388-41cc-b413-63b2a1610c01-kube-api-access-8mmfl" (OuterVolumeSpecName: "kube-api-access-8mmfl") pod "f4032c9d-3388-41cc-b413-63b2a1610c01" (UID: "f4032c9d-3388-41cc-b413-63b2a1610c01"). InnerVolumeSpecName "kube-api-access-8mmfl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:08:36 crc kubenswrapper[4956]: I1211 22:08:36.921383 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4032c9d-3388-41cc-b413-63b2a1610c01-scripts" (OuterVolumeSpecName: "scripts") pod "f4032c9d-3388-41cc-b413-63b2a1610c01" (UID: "f4032c9d-3388-41cc-b413-63b2a1610c01"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 22:08:36 crc kubenswrapper[4956]: I1211 22:08:36.923365 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4032c9d-3388-41cc-b413-63b2a1610c01-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "f4032c9d-3388-41cc-b413-63b2a1610c01" (UID: "f4032c9d-3388-41cc-b413-63b2a1610c01"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 22:08:36 crc kubenswrapper[4956]: I1211 22:08:36.937362 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4032c9d-3388-41cc-b413-63b2a1610c01-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "f4032c9d-3388-41cc-b413-63b2a1610c01" (UID: "f4032c9d-3388-41cc-b413-63b2a1610c01"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 22:08:36 crc kubenswrapper[4956]: I1211 22:08:36.984329 4956 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4032c9d-3388-41cc-b413-63b2a1610c01-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:36 crc kubenswrapper[4956]: I1211 22:08:36.984374 4956 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f4032c9d-3388-41cc-b413-63b2a1610c01-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:36 crc kubenswrapper[4956]: I1211 22:08:36.984394 4956 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f4032c9d-3388-41cc-b413-63b2a1610c01-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:36 crc kubenswrapper[4956]: I1211 22:08:36.984411 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8mmfl\" (UniqueName: \"kubernetes.io/projected/f4032c9d-3388-41cc-b413-63b2a1610c01-kube-api-access-8mmfl\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:36 crc kubenswrapper[4956]: I1211 22:08:36.984427 4956 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f4032c9d-3388-41cc-b413-63b2a1610c01-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:36 crc kubenswrapper[4956]: I1211 22:08:36.984441 4956 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f4032c9d-3388-41cc-b413-63b2a1610c01-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.455415 4956 prober.go:107] "Probe failed" probeType="Readiness" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" podUID="5e222ed6-506d-4466-85c0-6e6354f42d68" containerName="proxy-server" probeResult="failure" output="Get \"http://10.217.0.97:8080/healthcheck\": dial tcp 10.217.0.97:8080: connect: connection refused" Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.455626 4956 prober.go:107] "Probe failed" probeType="Readiness" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" podUID="5e222ed6-506d-4466-85c0-6e6354f42d68" containerName="proxy-httpd" 
probeResult="failure" output="Get \"http://10.217.0.97:8080/healthcheck\": dial tcp 10.217.0.97:8080: connect: connection refused" Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609388 4956 generic.go:334] "Generic (PLEG): container finished" podID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerID="a561c4c3234210c526decac52c4967b36f018a2a4d281f447274298666b8bb3a" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609425 4956 generic.go:334] "Generic (PLEG): container finished" podID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerID="e39c8f5bdc73de90a8bf47fa38aa96752c5f9023258cdf6fd726f578f66114c7" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609438 4956 generic.go:334] "Generic (PLEG): container finished" podID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerID="8d9db6ba9c013b3aceeb0d432f3f1108b671d9f936b333ee509d9287459701e2" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609448 4956 generic.go:334] "Generic (PLEG): container finished" podID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerID="084213c369816602ce36728603116f761776815ba0c4b828ba0e9094d25a9eda" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609459 4956 generic.go:334] "Generic (PLEG): container finished" podID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerID="c21ec3f9ec5961890abc669a7c0714d8c107d1beca6635731859ef921ba478c2" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609470 4956 generic.go:334] "Generic (PLEG): container finished" podID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerID="335652464f7bf5e1650d8ef6b0d1b7a9bccddd7f71328a98d1ef1d8dc46a9237" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609479 4956 generic.go:334] "Generic (PLEG): container finished" podID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerID="7f816b3f9aa72639f2ad16ba973317a588bdc1f18f52e6ddd4912403e426b8aa" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609487 4956 generic.go:334] "Generic (PLEG): container finished" podID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerID="6ec9035471b611e18061f20ad241f8fc6ece2de36cc35130c16f910a95863a8b" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609496 4956 generic.go:334] "Generic (PLEG): container finished" podID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerID="eb593dfd2e409aa5f57fb4dd556d6d93c289515777083c34eeb0e7bdf499f344" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609493 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerDied","Data":"a561c4c3234210c526decac52c4967b36f018a2a4d281f447274298666b8bb3a"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609539 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerDied","Data":"e39c8f5bdc73de90a8bf47fa38aa96752c5f9023258cdf6fd726f578f66114c7"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609555 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerDied","Data":"8d9db6ba9c013b3aceeb0d432f3f1108b671d9f936b333ee509d9287459701e2"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609565 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" 
event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerDied","Data":"084213c369816602ce36728603116f761776815ba0c4b828ba0e9094d25a9eda"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609577 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerDied","Data":"c21ec3f9ec5961890abc669a7c0714d8c107d1beca6635731859ef921ba478c2"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609588 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerDied","Data":"335652464f7bf5e1650d8ef6b0d1b7a9bccddd7f71328a98d1ef1d8dc46a9237"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609506 4956 generic.go:334] "Generic (PLEG): container finished" podID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerID="df3404b3569b427c5eff58b9f7d038cdf6fb11ae6f0a0354045aa6803cd80793" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609599 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerDied","Data":"7f816b3f9aa72639f2ad16ba973317a588bdc1f18f52e6ddd4912403e426b8aa"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609606 4956 generic.go:334] "Generic (PLEG): container finished" podID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerID="5fa3011ef640c6b3cb258381deb019de5b934f0bde4e0a8db62c222b53729fa2" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609611 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerDied","Data":"6ec9035471b611e18061f20ad241f8fc6ece2de36cc35130c16f910a95863a8b"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609622 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerDied","Data":"eb593dfd2e409aa5f57fb4dd556d6d93c289515777083c34eeb0e7bdf499f344"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609635 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerDied","Data":"df3404b3569b427c5eff58b9f7d038cdf6fb11ae6f0a0354045aa6803cd80793"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609647 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerDied","Data":"5fa3011ef640c6b3cb258381deb019de5b934f0bde4e0a8db62c222b53729fa2"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609659 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerDied","Data":"967bb04f5adac6da7467c43c1e7c07d69287cc4f6152349931e8523be8e8f9e7"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609616 4956 generic.go:334] "Generic (PLEG): container finished" podID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerID="967bb04f5adac6da7467c43c1e7c07d69287cc4f6152349931e8523be8e8f9e7" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609678 4956 generic.go:334] "Generic (PLEG): container finished" podID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" 
containerID="f59b79b6d8c86bb83416f175bb80a0cdddbf06ca5094679bf78e941c18750b8c" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609688 4956 generic.go:334] "Generic (PLEG): container finished" podID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerID="3b1b9e20037c7e86183cad618af5b2ebd5dad2173b0e829a17a9fd9ed798ebda" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609737 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerDied","Data":"f59b79b6d8c86bb83416f175bb80a0cdddbf06ca5094679bf78e941c18750b8c"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.609752 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerDied","Data":"3b1b9e20037c7e86183cad618af5b2ebd5dad2173b0e829a17a9fd9ed798ebda"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619562 4956 generic.go:334] "Generic (PLEG): container finished" podID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerID="8bede34505de1118ea6c6f2138158e39b74f4de1e0a373cd8163df49da3467e5" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619592 4956 generic.go:334] "Generic (PLEG): container finished" podID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerID="2afe808db81f1f547d5e43a75174d6726075cec8eba7c43fbac29d747036d0d2" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619600 4956 generic.go:334] "Generic (PLEG): container finished" podID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerID="ba04ba1a1aae9d4fdcef5abebb4e49f3250a3b585e7e8d5dc5228a86c1bc9f9a" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619608 4956 generic.go:334] "Generic (PLEG): container finished" podID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerID="c65bc5ab07d0b1c95e58bc177c8e8940a7634af61a15b39da1832947f78faf92" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619615 4956 generic.go:334] "Generic (PLEG): container finished" podID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerID="e80cbfd4c359ece72706a0bb70f950fdbd439cb876c0d775d64a392a1e2eba70" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619623 4956 generic.go:334] "Generic (PLEG): container finished" podID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerID="771b4bea890b0dbb9c0e838f2a4041968f361f01433fbe789b4c65fc0d26ce48" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619624 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerDied","Data":"8bede34505de1118ea6c6f2138158e39b74f4de1e0a373cd8163df49da3467e5"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619661 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerDied","Data":"2afe808db81f1f547d5e43a75174d6726075cec8eba7c43fbac29d747036d0d2"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619676 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerDied","Data":"ba04ba1a1aae9d4fdcef5abebb4e49f3250a3b585e7e8d5dc5228a86c1bc9f9a"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619688 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerDied","Data":"c65bc5ab07d0b1c95e58bc177c8e8940a7634af61a15b39da1832947f78faf92"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619702 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerDied","Data":"e80cbfd4c359ece72706a0bb70f950fdbd439cb876c0d775d64a392a1e2eba70"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619713 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerDied","Data":"771b4bea890b0dbb9c0e838f2a4041968f361f01433fbe789b4c65fc0d26ce48"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619723 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerDied","Data":"6cd81b71ea21b926b16de87485bb007c2d3967bc12fcbabd00d34e3ee3bde59c"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619631 4956 generic.go:334] "Generic (PLEG): container finished" podID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerID="6cd81b71ea21b926b16de87485bb007c2d3967bc12fcbabd00d34e3ee3bde59c" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619743 4956 generic.go:334] "Generic (PLEG): container finished" podID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerID="9f04271319e47c3c4d91a1f09a641162e56b39afe178688bc0a1c70ba7f240e0" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619753 4956 generic.go:334] "Generic (PLEG): container finished" podID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerID="72447e0c769b939455f456831547067dc18164cedbeb060390ebcf30009c56f2" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619761 4956 generic.go:334] "Generic (PLEG): container finished" podID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerID="0309652fc5f357df3b54b72b8a66e1c9d2c233b4ee1a56a15051304572a14de3" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619787 4956 generic.go:334] "Generic (PLEG): container finished" podID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerID="6bbb8038c04c11e53b08f438dcff124a7afe25a3f3009090b7c8030a0a927300" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619795 4956 generic.go:334] "Generic (PLEG): container finished" podID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerID="01afedf1ad2cfb2fe5ede90de33b275c23506950b6517d31da319c73ce03de60" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619792 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerDied","Data":"9f04271319e47c3c4d91a1f09a641162e56b39afe178688bc0a1c70ba7f240e0"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619821 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerDied","Data":"72447e0c769b939455f456831547067dc18164cedbeb060390ebcf30009c56f2"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619831 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" 
event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerDied","Data":"0309652fc5f357df3b54b72b8a66e1c9d2c233b4ee1a56a15051304572a14de3"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619839 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerDied","Data":"6bbb8038c04c11e53b08f438dcff124a7afe25a3f3009090b7c8030a0a927300"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619803 4956 generic.go:334] "Generic (PLEG): container finished" podID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerID="f4b63f70057b4692f3d70ec7b615692707eb7d542fd26618e90be529876ee66b" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619848 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerDied","Data":"01afedf1ad2cfb2fe5ede90de33b275c23506950b6517d31da319c73ce03de60"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619851 4956 generic.go:334] "Generic (PLEG): container finished" podID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerID="1e6d56fd50a4321561ddb2408995bd963ef98e87ccf27adaf9ab6757e7eef72b" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619858 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerDied","Data":"f4b63f70057b4692f3d70ec7b615692707eb7d542fd26618e90be529876ee66b"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.619867 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerDied","Data":"1e6d56fd50a4321561ddb2408995bd963ef98e87ccf27adaf9ab6757e7eef72b"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.621914 4956 generic.go:334] "Generic (PLEG): container finished" podID="5e222ed6-506d-4466-85c0-6e6354f42d68" containerID="63233aa4201adb722c58bd7a50251777650229c55307e7960a5760f113ef265d" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.621937 4956 generic.go:334] "Generic (PLEG): container finished" podID="5e222ed6-506d-4466-85c0-6e6354f42d68" containerID="72c0985dfe46963c43bb2ceb143217bab3a2ca4c734ba40d5698d922f2c963e3" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.621984 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" event={"ID":"5e222ed6-506d-4466-85c0-6e6354f42d68","Type":"ContainerDied","Data":"63233aa4201adb722c58bd7a50251777650229c55307e7960a5760f113ef265d"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.622016 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" event={"ID":"5e222ed6-506d-4466-85c0-6e6354f42d68","Type":"ContainerDied","Data":"72c0985dfe46963c43bb2ceb143217bab3a2ca4c734ba40d5698d922f2c963e3"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.627715 4956 generic.go:334] "Generic (PLEG): container finished" podID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerID="fba3b880188be2c5242c9e67acd79ea380838385f3cd14d4847817fbf375c7a7" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.627743 4956 generic.go:334] "Generic (PLEG): container finished" podID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerID="03f3cc627480f8b066b74efbbd184ebcd415769b99dd460d8995c199560c97d5" exitCode=0 
Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.627756 4956 generic.go:334] "Generic (PLEG): container finished" podID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerID="3274a782fa544ad8c1816d0d0440d068cddba402e63527e803cda3b1edb031b0" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.627765 4956 generic.go:334] "Generic (PLEG): container finished" podID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerID="85861c8a818f17c968073a0311f6ea4844080a4856017d4b623cc27d3d844bf8" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.627791 4956 generic.go:334] "Generic (PLEG): container finished" podID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerID="11d07ad78fd327b49162b3c6e2a87af5fe34aed448f5005225d89ab2a00714ed" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.627795 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerDied","Data":"fba3b880188be2c5242c9e67acd79ea380838385f3cd14d4847817fbf375c7a7"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.627821 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerDied","Data":"03f3cc627480f8b066b74efbbd184ebcd415769b99dd460d8995c199560c97d5"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.627835 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerDied","Data":"3274a782fa544ad8c1816d0d0440d068cddba402e63527e803cda3b1edb031b0"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.627846 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerDied","Data":"85861c8a818f17c968073a0311f6ea4844080a4856017d4b623cc27d3d844bf8"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.627857 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerDied","Data":"11d07ad78fd327b49162b3c6e2a87af5fe34aed448f5005225d89ab2a00714ed"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.627869 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerDied","Data":"9d4aed0b7b74b22c6bddf686ce2036cb23fae0e337eb638f319bab7788fae09f"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.627800 4956 generic.go:334] "Generic (PLEG): container finished" podID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerID="9d4aed0b7b74b22c6bddf686ce2036cb23fae0e337eb638f319bab7788fae09f" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.627888 4956 generic.go:334] "Generic (PLEG): container finished" podID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerID="26c1c73dcf0c5b39c4650886e10e51e3546fdbba8f61bbc344044f4d8589fa89" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.627899 4956 generic.go:334] "Generic (PLEG): container finished" podID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerID="abc834d6a7065ba9aba75a616670e471fcad6c34902e868c09cd27a3200f0f50" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.627909 4956 generic.go:334] "Generic (PLEG): container finished" podID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" 
containerID="5585fc1523add48f6aaa6482f0fd078a810c938185772132e926ad6267cd67b5" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.627918 4956 generic.go:334] "Generic (PLEG): container finished" podID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerID="f55235b10356a944991fa8ae283b28fde5f8ad0369f476676bb3ffa23c942fe1" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.627926 4956 generic.go:334] "Generic (PLEG): container finished" podID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerID="2435489d05e6fe368d33194469d365927b43d7cf85f6a3d60022925f9a4f27bb" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.627934 4956 generic.go:334] "Generic (PLEG): container finished" podID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerID="f3c99d9956bfc929e57948603b9d53a18d43469dfe98a68080ac495d23746def" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.627943 4956 generic.go:334] "Generic (PLEG): container finished" podID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerID="f9670200134c1286eaf9310939478d01fc8211a2de256c795f9999d5b3847c1b" exitCode=0 Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.627963 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerDied","Data":"26c1c73dcf0c5b39c4650886e10e51e3546fdbba8f61bbc344044f4d8589fa89"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.627986 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerDied","Data":"abc834d6a7065ba9aba75a616670e471fcad6c34902e868c09cd27a3200f0f50"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.627997 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerDied","Data":"5585fc1523add48f6aaa6482f0fd078a810c938185772132e926ad6267cd67b5"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.628011 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerDied","Data":"f55235b10356a944991fa8ae283b28fde5f8ad0369f476676bb3ffa23c942fe1"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.628020 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerDied","Data":"2435489d05e6fe368d33194469d365927b43d7cf85f6a3d60022925f9a4f27bb"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.628028 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerDied","Data":"f3c99d9956bfc929e57948603b9d53a18d43469dfe98a68080ac495d23746def"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.628036 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerDied","Data":"f9670200134c1286eaf9310939478d01fc8211a2de256c795f9999d5b3847c1b"} Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.629914 4956 scope.go:117] "RemoveContainer" containerID="58a758241406edb0e7446bb5428e6341c8b2badaf53162050f9550c0928c7127" Dec 11 22:08:37 crc kubenswrapper[4956]: I1211 22:08:37.629935 4956 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-jcchf" Dec 11 22:08:38 crc kubenswrapper[4956]: I1211 22:08:38.038164 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4032c9d-3388-41cc-b413-63b2a1610c01" path="/var/lib/kubelet/pods/f4032c9d-3388-41cc-b413-63b2a1610c01/volumes" Dec 11 22:08:38 crc kubenswrapper[4956]: I1211 22:08:38.485437 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:08:38 crc kubenswrapper[4956]: I1211 22:08:38.506484 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e222ed6-506d-4466-85c0-6e6354f42d68-run-httpd\") pod \"5e222ed6-506d-4466-85c0-6e6354f42d68\" (UID: \"5e222ed6-506d-4466-85c0-6e6354f42d68\") " Dec 11 22:08:38 crc kubenswrapper[4956]: I1211 22:08:38.506568 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e222ed6-506d-4466-85c0-6e6354f42d68-log-httpd\") pod \"5e222ed6-506d-4466-85c0-6e6354f42d68\" (UID: \"5e222ed6-506d-4466-85c0-6e6354f42d68\") " Dec 11 22:08:38 crc kubenswrapper[4956]: I1211 22:08:38.506595 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e222ed6-506d-4466-85c0-6e6354f42d68-config-data\") pod \"5e222ed6-506d-4466-85c0-6e6354f42d68\" (UID: \"5e222ed6-506d-4466-85c0-6e6354f42d68\") " Dec 11 22:08:38 crc kubenswrapper[4956]: I1211 22:08:38.506656 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-etc-swift\") pod \"5e222ed6-506d-4466-85c0-6e6354f42d68\" (UID: \"5e222ed6-506d-4466-85c0-6e6354f42d68\") " Dec 11 22:08:38 crc kubenswrapper[4956]: I1211 22:08:38.506747 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n96rr\" (UniqueName: \"kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-kube-api-access-n96rr\") pod \"5e222ed6-506d-4466-85c0-6e6354f42d68\" (UID: \"5e222ed6-506d-4466-85c0-6e6354f42d68\") " Dec 11 22:08:38 crc kubenswrapper[4956]: I1211 22:08:38.506887 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e222ed6-506d-4466-85c0-6e6354f42d68-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5e222ed6-506d-4466-85c0-6e6354f42d68" (UID: "5e222ed6-506d-4466-85c0-6e6354f42d68"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:08:38 crc kubenswrapper[4956]: I1211 22:08:38.506964 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e222ed6-506d-4466-85c0-6e6354f42d68-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5e222ed6-506d-4466-85c0-6e6354f42d68" (UID: "5e222ed6-506d-4466-85c0-6e6354f42d68"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:08:38 crc kubenswrapper[4956]: I1211 22:08:38.507108 4956 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e222ed6-506d-4466-85c0-6e6354f42d68-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:38 crc kubenswrapper[4956]: I1211 22:08:38.507121 4956 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e222ed6-506d-4466-85c0-6e6354f42d68-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:38 crc kubenswrapper[4956]: I1211 22:08:38.519287 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-kube-api-access-n96rr" (OuterVolumeSpecName: "kube-api-access-n96rr") pod "5e222ed6-506d-4466-85c0-6e6354f42d68" (UID: "5e222ed6-506d-4466-85c0-6e6354f42d68"). InnerVolumeSpecName "kube-api-access-n96rr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:08:38 crc kubenswrapper[4956]: I1211 22:08:38.523065 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "5e222ed6-506d-4466-85c0-6e6354f42d68" (UID: "5e222ed6-506d-4466-85c0-6e6354f42d68"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:08:38 crc kubenswrapper[4956]: I1211 22:08:38.543448 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e222ed6-506d-4466-85c0-6e6354f42d68-config-data" (OuterVolumeSpecName: "config-data") pod "5e222ed6-506d-4466-85c0-6e6354f42d68" (UID: "5e222ed6-506d-4466-85c0-6e6354f42d68"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 22:08:38 crc kubenswrapper[4956]: I1211 22:08:38.608820 4956 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:38 crc kubenswrapper[4956]: I1211 22:08:38.608849 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n96rr\" (UniqueName: \"kubernetes.io/projected/5e222ed6-506d-4466-85c0-6e6354f42d68-kube-api-access-n96rr\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:38 crc kubenswrapper[4956]: I1211 22:08:38.608860 4956 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e222ed6-506d-4466-85c0-6e6354f42d68-config-data\") on node \"crc\" DevicePath \"\"" Dec 11 22:08:38 crc kubenswrapper[4956]: I1211 22:08:38.639318 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" event={"ID":"5e222ed6-506d-4466-85c0-6e6354f42d68","Type":"ContainerDied","Data":"716e94e5438f3e259debb370da0bb501f7a807af46f452383dcc4dd68e3a0b3c"} Dec 11 22:08:38 crc kubenswrapper[4956]: I1211 22:08:38.639371 4956 scope.go:117] "RemoveContainer" containerID="63233aa4201adb722c58bd7a50251777650229c55307e7960a5760f113ef265d" Dec 11 22:08:38 crc kubenswrapper[4956]: I1211 22:08:38.639456 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj" Dec 11 22:08:38 crc kubenswrapper[4956]: I1211 22:08:38.672584 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj"] Dec 11 22:08:38 crc kubenswrapper[4956]: I1211 22:08:38.673502 4956 scope.go:117] "RemoveContainer" containerID="72c0985dfe46963c43bb2ceb143217bab3a2ca4c734ba40d5698d922f2c963e3" Dec 11 22:08:38 crc kubenswrapper[4956]: I1211 22:08:38.680981 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-proxy-7d4fb88647-tslmj"] Dec 11 22:08:40 crc kubenswrapper[4956]: I1211 22:08:40.031861 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e222ed6-506d-4466-85c0-6e6354f42d68" path="/var/lib/kubelet/pods/5e222ed6-506d-4466-85c0-6e6354f42d68/volumes" Dec 11 22:08:46 crc kubenswrapper[4956]: I1211 22:08:46.888048 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 22:08:46 crc kubenswrapper[4956]: I1211 22:08:46.888710 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 22:09:05 crc kubenswrapper[4956]: I1211 22:09:05.898129 4956 generic.go:334] "Generic (PLEG): container finished" podID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerID="f1d46d139c19bce7d4f122fa6e17d51f3b7304f2d735d5649d973e4333e10d74" exitCode=137 Dec 11 22:09:05 crc kubenswrapper[4956]: I1211 22:09:05.898346 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerDied","Data":"f1d46d139c19bce7d4f122fa6e17d51f3b7304f2d735d5649d973e4333e10d74"} Dec 11 22:09:05 crc kubenswrapper[4956]: I1211 22:09:05.916174 4956 generic.go:334] "Generic (PLEG): container finished" podID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerID="eb0c06a1ac084268e08f264062ea798166f63ba8292f6943eeea7d7e186694fe" exitCode=137 Dec 11 22:09:05 crc kubenswrapper[4956]: I1211 22:09:05.916249 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerDied","Data":"eb0c06a1ac084268e08f264062ea798166f63ba8292f6943eeea7d7e186694fe"} Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.181013 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.209958 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.245778 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.325891 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/a114c533-fe14-41f3-b4fc-6431a48cdfc9-cache\") pod \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\" (UID: \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") " Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.326275 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-etc-swift\") pod \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\" (UID: \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") " Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.326300 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\" (UID: \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") " Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.326323 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6e5ed4e3-d63b-4cd8-b886-483ca0abc75e-etc-swift\") pod \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\" (UID: \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\") " Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.326362 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/a114c533-fe14-41f3-b4fc-6431a48cdfc9-lock\") pod \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\" (UID: \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") " Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.326405 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6e5ed4e3-d63b-4cd8-b886-483ca0abc75e-lock\") pod \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\" (UID: \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\") " Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.326454 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6e5ed4e3-d63b-4cd8-b886-483ca0abc75e-cache\") pod \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\" (UID: \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\") " Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.326498 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45jfd\" (UniqueName: \"kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-kube-api-access-45jfd\") pod \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\" (UID: \"a114c533-fe14-41f3-b4fc-6431a48cdfc9\") " Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.326524 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hnskr\" (UniqueName: \"kubernetes.io/projected/6e5ed4e3-d63b-4cd8-b886-483ca0abc75e-kube-api-access-hnskr\") pod \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\" (UID: \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\") " Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.326544 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\" (UID: \"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e\") " Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.326549 4956 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a114c533-fe14-41f3-b4fc-6431a48cdfc9-cache" (OuterVolumeSpecName: "cache") pod "a114c533-fe14-41f3-b4fc-6431a48cdfc9" (UID: "a114c533-fe14-41f3-b4fc-6431a48cdfc9"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.326845 4956 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/a114c533-fe14-41f3-b4fc-6431a48cdfc9-cache\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.327542 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a114c533-fe14-41f3-b4fc-6431a48cdfc9-lock" (OuterVolumeSpecName: "lock") pod "a114c533-fe14-41f3-b4fc-6431a48cdfc9" (UID: "a114c533-fe14-41f3-b4fc-6431a48cdfc9"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.328069 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e5ed4e3-d63b-4cd8-b886-483ca0abc75e-cache" (OuterVolumeSpecName: "cache") pod "6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" (UID: "6e5ed4e3-d63b-4cd8-b886-483ca0abc75e"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.328218 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e5ed4e3-d63b-4cd8-b886-483ca0abc75e-lock" (OuterVolumeSpecName: "lock") pod "6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" (UID: "6e5ed4e3-d63b-4cd8-b886-483ca0abc75e"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.332297 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "swift") pod "a114c533-fe14-41f3-b4fc-6431a48cdfc9" (UID: "a114c533-fe14-41f3-b4fc-6431a48cdfc9"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.332502 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "swift") pod "6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" (UID: "6e5ed4e3-d63b-4cd8-b886-483ca0abc75e"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.332550 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-kube-api-access-45jfd" (OuterVolumeSpecName: "kube-api-access-45jfd") pod "a114c533-fe14-41f3-b4fc-6431a48cdfc9" (UID: "a114c533-fe14-41f3-b4fc-6431a48cdfc9"). InnerVolumeSpecName "kube-api-access-45jfd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.332308 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e5ed4e3-d63b-4cd8-b886-483ca0abc75e-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" (UID: "6e5ed4e3-d63b-4cd8-b886-483ca0abc75e"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.332591 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "a114c533-fe14-41f3-b4fc-6431a48cdfc9" (UID: "a114c533-fe14-41f3-b4fc-6431a48cdfc9"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.332740 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e5ed4e3-d63b-4cd8-b886-483ca0abc75e-kube-api-access-hnskr" (OuterVolumeSpecName: "kube-api-access-hnskr") pod "6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" (UID: "6e5ed4e3-d63b-4cd8-b886-483ca0abc75e"). InnerVolumeSpecName "kube-api-access-hnskr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.428058 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/41ffa609-e4fd-42af-b5bb-eeda0fca28be-lock\") pod \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\" (UID: \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\") " Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.428394 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pht4x\" (UniqueName: \"kubernetes.io/projected/41ffa609-e4fd-42af-b5bb-eeda0fca28be-kube-api-access-pht4x\") pod \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\" (UID: \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\") " Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.428568 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\" (UID: \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\") " Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.428742 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/41ffa609-e4fd-42af-b5bb-eeda0fca28be-cache\") pod \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\" (UID: \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\") " Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.428970 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/41ffa609-e4fd-42af-b5bb-eeda0fca28be-etc-swift\") pod \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\" (UID: \"41ffa609-e4fd-42af-b5bb-eeda0fca28be\") " Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.429463 4956 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.429564 4956 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.429654 4956 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6e5ed4e3-d63b-4cd8-b886-483ca0abc75e-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.429744 4956 reconciler_common.go:293] "Volume detached for volume \"lock\" 
(UniqueName: \"kubernetes.io/empty-dir/a114c533-fe14-41f3-b4fc-6431a48cdfc9-lock\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.429846 4956 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6e5ed4e3-d63b-4cd8-b886-483ca0abc75e-lock\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.429935 4956 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6e5ed4e3-d63b-4cd8-b886-483ca0abc75e-cache\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.430016 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45jfd\" (UniqueName: \"kubernetes.io/projected/a114c533-fe14-41f3-b4fc-6431a48cdfc9-kube-api-access-45jfd\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.430096 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hnskr\" (UniqueName: \"kubernetes.io/projected/6e5ed4e3-d63b-4cd8-b886-483ca0abc75e-kube-api-access-hnskr\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.430187 4956 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.428577 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41ffa609-e4fd-42af-b5bb-eeda0fca28be-lock" (OuterVolumeSpecName: "lock") pod "41ffa609-e4fd-42af-b5bb-eeda0fca28be" (UID: "41ffa609-e4fd-42af-b5bb-eeda0fca28be"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.431154 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41ffa609-e4fd-42af-b5bb-eeda0fca28be-cache" (OuterVolumeSpecName: "cache") pod "41ffa609-e4fd-42af-b5bb-eeda0fca28be" (UID: "41ffa609-e4fd-42af-b5bb-eeda0fca28be"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.431955 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "swift") pod "41ffa609-e4fd-42af-b5bb-eeda0fca28be" (UID: "41ffa609-e4fd-42af-b5bb-eeda0fca28be"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.437969 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41ffa609-e4fd-42af-b5bb-eeda0fca28be-kube-api-access-pht4x" (OuterVolumeSpecName: "kube-api-access-pht4x") pod "41ffa609-e4fd-42af-b5bb-eeda0fca28be" (UID: "41ffa609-e4fd-42af-b5bb-eeda0fca28be"). InnerVolumeSpecName "kube-api-access-pht4x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.438899 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41ffa609-e4fd-42af-b5bb-eeda0fca28be-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "41ffa609-e4fd-42af-b5bb-eeda0fca28be" (UID: "41ffa609-e4fd-42af-b5bb-eeda0fca28be"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.460838 4956 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.463475 4956 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.532087 4956 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/41ffa609-e4fd-42af-b5bb-eeda0fca28be-lock\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.532127 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pht4x\" (UniqueName: \"kubernetes.io/projected/41ffa609-e4fd-42af-b5bb-eeda0fca28be-kube-api-access-pht4x\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.532163 4956 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.532175 4956 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.532185 4956 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/41ffa609-e4fd-42af-b5bb-eeda0fca28be-cache\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.532193 4956 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/41ffa609-e4fd-42af-b5bb-eeda0fca28be-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.532203 4956 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.542756 4956 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.633625 4956 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.929805 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"a114c533-fe14-41f3-b4fc-6431a48cdfc9","Type":"ContainerDied","Data":"579e5219991bb2beaa43e6e4fa094f5956d3f649392f2970a5d196f44e23db02"} Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.929867 4956 scope.go:117] "RemoveContainer" containerID="eb0c06a1ac084268e08f264062ea798166f63ba8292f6943eeea7d7e186694fe" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.930065 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.939976 4956 generic.go:334] "Generic (PLEG): container finished" podID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerID="d11b27f83a4a6e373a896c9ace325eefde941f0504041268ed4a1e33ce528ca2" exitCode=137 Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.940079 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.940928 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerDied","Data":"d11b27f83a4a6e373a896c9ace325eefde941f0504041268ed4a1e33ce528ca2"} Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.940977 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"41ffa609-e4fd-42af-b5bb-eeda0fca28be","Type":"ContainerDied","Data":"1117a2351db8427981846289248f133b5a336a8ded17a5d06b6800d209b92f7c"} Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.950093 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"6e5ed4e3-d63b-4cd8-b886-483ca0abc75e","Type":"ContainerDied","Data":"41198b5fff9883f116ceb0d78258610dee7a5a0a3d42b18691063079e7b12741"} Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.950142 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9d4aed0b7b74b22c6bddf686ce2036cb23fae0e337eb638f319bab7788fae09f"} Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.950156 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f63154460a3110e5255b3ef2fe24ccdd055d84f3bad6882295d3d709f130f286"} Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.950164 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"26c1c73dcf0c5b39c4650886e10e51e3546fdbba8f61bbc344044f4d8589fa89"} Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.950173 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"abc834d6a7065ba9aba75a616670e471fcad6c34902e868c09cd27a3200f0f50"} Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.950180 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5585fc1523add48f6aaa6482f0fd078a810c938185772132e926ad6267cd67b5"} Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.950186 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f55235b10356a944991fa8ae283b28fde5f8ad0369f476676bb3ffa23c942fe1"} Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.950193 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2435489d05e6fe368d33194469d365927b43d7cf85f6a3d60022925f9a4f27bb"} Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.950199 4956 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f3c99d9956bfc929e57948603b9d53a18d43469dfe98a68080ac495d23746def"} Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.950206 4956 pod_container_deletor.go:114] "Failed to issue the request to 
remove container" containerID={"Type":"cri-o","ID":"f9670200134c1286eaf9310939478d01fc8211a2de256c795f9999d5b3847c1b"} Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.950240 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.969872 4956 scope.go:117] "RemoveContainer" containerID="a561c4c3234210c526decac52c4967b36f018a2a4d281f447274298666b8bb3a" Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.980802 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-1"] Dec 11 22:09:06 crc kubenswrapper[4956]: I1211 22:09:06.990727 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-storage-1"] Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.018473 4956 scope.go:117] "RemoveContainer" containerID="e39c8f5bdc73de90a8bf47fa38aa96752c5f9023258cdf6fd726f578f66114c7" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.021496 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.028619 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.038873 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-2"] Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.042357 4956 scope.go:117] "RemoveContainer" containerID="8d9db6ba9c013b3aceeb0d432f3f1108b671d9f936b333ee509d9287459701e2" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.045365 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-storage-2"] Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.061616 4956 scope.go:117] "RemoveContainer" containerID="084213c369816602ce36728603116f761776815ba0c4b828ba0e9094d25a9eda" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.080061 4956 scope.go:117] "RemoveContainer" containerID="c21ec3f9ec5961890abc669a7c0714d8c107d1beca6635731859ef921ba478c2" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.098940 4956 scope.go:117] "RemoveContainer" containerID="335652464f7bf5e1650d8ef6b0d1b7a9bccddd7f71328a98d1ef1d8dc46a9237" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.117607 4956 scope.go:117] "RemoveContainer" containerID="7f816b3f9aa72639f2ad16ba973317a588bdc1f18f52e6ddd4912403e426b8aa" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.131268 4956 scope.go:117] "RemoveContainer" containerID="6ec9035471b611e18061f20ad241f8fc6ece2de36cc35130c16f910a95863a8b" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.147130 4956 scope.go:117] "RemoveContainer" containerID="eb593dfd2e409aa5f57fb4dd556d6d93c289515777083c34eeb0e7bdf499f344" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.170603 4956 scope.go:117] "RemoveContainer" containerID="df3404b3569b427c5eff58b9f7d038cdf6fb11ae6f0a0354045aa6803cd80793" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.188332 4956 scope.go:117] "RemoveContainer" containerID="5fa3011ef640c6b3cb258381deb019de5b934f0bde4e0a8db62c222b53729fa2" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.207861 4956 scope.go:117] "RemoveContainer" containerID="967bb04f5adac6da7467c43c1e7c07d69287cc4f6152349931e8523be8e8f9e7" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.222261 4956 scope.go:117] "RemoveContainer" 
containerID="f59b79b6d8c86bb83416f175bb80a0cdddbf06ca5094679bf78e941c18750b8c" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.244761 4956 scope.go:117] "RemoveContainer" containerID="3b1b9e20037c7e86183cad618af5b2ebd5dad2173b0e829a17a9fd9ed798ebda" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.264304 4956 scope.go:117] "RemoveContainer" containerID="d11b27f83a4a6e373a896c9ace325eefde941f0504041268ed4a1e33ce528ca2" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.291541 4956 scope.go:117] "RemoveContainer" containerID="8bede34505de1118ea6c6f2138158e39b74f4de1e0a373cd8163df49da3467e5" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.312006 4956 scope.go:117] "RemoveContainer" containerID="2afe808db81f1f547d5e43a75174d6726075cec8eba7c43fbac29d747036d0d2" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.327844 4956 scope.go:117] "RemoveContainer" containerID="ba04ba1a1aae9d4fdcef5abebb4e49f3250a3b585e7e8d5dc5228a86c1bc9f9a" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.346980 4956 scope.go:117] "RemoveContainer" containerID="c65bc5ab07d0b1c95e58bc177c8e8940a7634af61a15b39da1832947f78faf92" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.365018 4956 scope.go:117] "RemoveContainer" containerID="e80cbfd4c359ece72706a0bb70f950fdbd439cb876c0d775d64a392a1e2eba70" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.382689 4956 scope.go:117] "RemoveContainer" containerID="771b4bea890b0dbb9c0e838f2a4041968f361f01433fbe789b4c65fc0d26ce48" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.404753 4956 scope.go:117] "RemoveContainer" containerID="6cd81b71ea21b926b16de87485bb007c2d3967bc12fcbabd00d34e3ee3bde59c" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.419626 4956 scope.go:117] "RemoveContainer" containerID="9f04271319e47c3c4d91a1f09a641162e56b39afe178688bc0a1c70ba7f240e0" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.434525 4956 scope.go:117] "RemoveContainer" containerID="72447e0c769b939455f456831547067dc18164cedbeb060390ebcf30009c56f2" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.456695 4956 scope.go:117] "RemoveContainer" containerID="0309652fc5f357df3b54b72b8a66e1c9d2c233b4ee1a56a15051304572a14de3" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.475284 4956 scope.go:117] "RemoveContainer" containerID="6bbb8038c04c11e53b08f438dcff124a7afe25a3f3009090b7c8030a0a927300" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.493467 4956 scope.go:117] "RemoveContainer" containerID="01afedf1ad2cfb2fe5ede90de33b275c23506950b6517d31da319c73ce03de60" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.509962 4956 scope.go:117] "RemoveContainer" containerID="f4b63f70057b4692f3d70ec7b615692707eb7d542fd26618e90be529876ee66b" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.524063 4956 scope.go:117] "RemoveContainer" containerID="1e6d56fd50a4321561ddb2408995bd963ef98e87ccf27adaf9ab6757e7eef72b" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.539725 4956 scope.go:117] "RemoveContainer" containerID="d11b27f83a4a6e373a896c9ace325eefde941f0504041268ed4a1e33ce528ca2" Dec 11 22:09:07 crc kubenswrapper[4956]: E1211 22:09:07.540250 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d11b27f83a4a6e373a896c9ace325eefde941f0504041268ed4a1e33ce528ca2\": container with ID starting with d11b27f83a4a6e373a896c9ace325eefde941f0504041268ed4a1e33ce528ca2 not found: ID does not exist" 
containerID="d11b27f83a4a6e373a896c9ace325eefde941f0504041268ed4a1e33ce528ca2" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.540298 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d11b27f83a4a6e373a896c9ace325eefde941f0504041268ed4a1e33ce528ca2"} err="failed to get container status \"d11b27f83a4a6e373a896c9ace325eefde941f0504041268ed4a1e33ce528ca2\": rpc error: code = NotFound desc = could not find container \"d11b27f83a4a6e373a896c9ace325eefde941f0504041268ed4a1e33ce528ca2\": container with ID starting with d11b27f83a4a6e373a896c9ace325eefde941f0504041268ed4a1e33ce528ca2 not found: ID does not exist" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.540333 4956 scope.go:117] "RemoveContainer" containerID="8bede34505de1118ea6c6f2138158e39b74f4de1e0a373cd8163df49da3467e5" Dec 11 22:09:07 crc kubenswrapper[4956]: E1211 22:09:07.540640 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8bede34505de1118ea6c6f2138158e39b74f4de1e0a373cd8163df49da3467e5\": container with ID starting with 8bede34505de1118ea6c6f2138158e39b74f4de1e0a373cd8163df49da3467e5 not found: ID does not exist" containerID="8bede34505de1118ea6c6f2138158e39b74f4de1e0a373cd8163df49da3467e5" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.540676 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8bede34505de1118ea6c6f2138158e39b74f4de1e0a373cd8163df49da3467e5"} err="failed to get container status \"8bede34505de1118ea6c6f2138158e39b74f4de1e0a373cd8163df49da3467e5\": rpc error: code = NotFound desc = could not find container \"8bede34505de1118ea6c6f2138158e39b74f4de1e0a373cd8163df49da3467e5\": container with ID starting with 8bede34505de1118ea6c6f2138158e39b74f4de1e0a373cd8163df49da3467e5 not found: ID does not exist" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.540700 4956 scope.go:117] "RemoveContainer" containerID="2afe808db81f1f547d5e43a75174d6726075cec8eba7c43fbac29d747036d0d2" Dec 11 22:09:07 crc kubenswrapper[4956]: E1211 22:09:07.540972 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2afe808db81f1f547d5e43a75174d6726075cec8eba7c43fbac29d747036d0d2\": container with ID starting with 2afe808db81f1f547d5e43a75174d6726075cec8eba7c43fbac29d747036d0d2 not found: ID does not exist" containerID="2afe808db81f1f547d5e43a75174d6726075cec8eba7c43fbac29d747036d0d2" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.540997 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2afe808db81f1f547d5e43a75174d6726075cec8eba7c43fbac29d747036d0d2"} err="failed to get container status \"2afe808db81f1f547d5e43a75174d6726075cec8eba7c43fbac29d747036d0d2\": rpc error: code = NotFound desc = could not find container \"2afe808db81f1f547d5e43a75174d6726075cec8eba7c43fbac29d747036d0d2\": container with ID starting with 2afe808db81f1f547d5e43a75174d6726075cec8eba7c43fbac29d747036d0d2 not found: ID does not exist" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.541011 4956 scope.go:117] "RemoveContainer" containerID="ba04ba1a1aae9d4fdcef5abebb4e49f3250a3b585e7e8d5dc5228a86c1bc9f9a" Dec 11 22:09:07 crc kubenswrapper[4956]: E1211 22:09:07.541275 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"ba04ba1a1aae9d4fdcef5abebb4e49f3250a3b585e7e8d5dc5228a86c1bc9f9a\": container with ID starting with ba04ba1a1aae9d4fdcef5abebb4e49f3250a3b585e7e8d5dc5228a86c1bc9f9a not found: ID does not exist" containerID="ba04ba1a1aae9d4fdcef5abebb4e49f3250a3b585e7e8d5dc5228a86c1bc9f9a" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.541294 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba04ba1a1aae9d4fdcef5abebb4e49f3250a3b585e7e8d5dc5228a86c1bc9f9a"} err="failed to get container status \"ba04ba1a1aae9d4fdcef5abebb4e49f3250a3b585e7e8d5dc5228a86c1bc9f9a\": rpc error: code = NotFound desc = could not find container \"ba04ba1a1aae9d4fdcef5abebb4e49f3250a3b585e7e8d5dc5228a86c1bc9f9a\": container with ID starting with ba04ba1a1aae9d4fdcef5abebb4e49f3250a3b585e7e8d5dc5228a86c1bc9f9a not found: ID does not exist" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.541308 4956 scope.go:117] "RemoveContainer" containerID="c65bc5ab07d0b1c95e58bc177c8e8940a7634af61a15b39da1832947f78faf92" Dec 11 22:09:07 crc kubenswrapper[4956]: E1211 22:09:07.542061 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c65bc5ab07d0b1c95e58bc177c8e8940a7634af61a15b39da1832947f78faf92\": container with ID starting with c65bc5ab07d0b1c95e58bc177c8e8940a7634af61a15b39da1832947f78faf92 not found: ID does not exist" containerID="c65bc5ab07d0b1c95e58bc177c8e8940a7634af61a15b39da1832947f78faf92" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.542080 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c65bc5ab07d0b1c95e58bc177c8e8940a7634af61a15b39da1832947f78faf92"} err="failed to get container status \"c65bc5ab07d0b1c95e58bc177c8e8940a7634af61a15b39da1832947f78faf92\": rpc error: code = NotFound desc = could not find container \"c65bc5ab07d0b1c95e58bc177c8e8940a7634af61a15b39da1832947f78faf92\": container with ID starting with c65bc5ab07d0b1c95e58bc177c8e8940a7634af61a15b39da1832947f78faf92 not found: ID does not exist" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.542095 4956 scope.go:117] "RemoveContainer" containerID="e80cbfd4c359ece72706a0bb70f950fdbd439cb876c0d775d64a392a1e2eba70" Dec 11 22:09:07 crc kubenswrapper[4956]: E1211 22:09:07.542449 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e80cbfd4c359ece72706a0bb70f950fdbd439cb876c0d775d64a392a1e2eba70\": container with ID starting with e80cbfd4c359ece72706a0bb70f950fdbd439cb876c0d775d64a392a1e2eba70 not found: ID does not exist" containerID="e80cbfd4c359ece72706a0bb70f950fdbd439cb876c0d775d64a392a1e2eba70" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.542471 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e80cbfd4c359ece72706a0bb70f950fdbd439cb876c0d775d64a392a1e2eba70"} err="failed to get container status \"e80cbfd4c359ece72706a0bb70f950fdbd439cb876c0d775d64a392a1e2eba70\": rpc error: code = NotFound desc = could not find container \"e80cbfd4c359ece72706a0bb70f950fdbd439cb876c0d775d64a392a1e2eba70\": container with ID starting with e80cbfd4c359ece72706a0bb70f950fdbd439cb876c0d775d64a392a1e2eba70 not found: ID does not exist" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.542483 4956 scope.go:117] "RemoveContainer" containerID="771b4bea890b0dbb9c0e838f2a4041968f361f01433fbe789b4c65fc0d26ce48" Dec 11 22:09:07 crc 
kubenswrapper[4956]: E1211 22:09:07.542725 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"771b4bea890b0dbb9c0e838f2a4041968f361f01433fbe789b4c65fc0d26ce48\": container with ID starting with 771b4bea890b0dbb9c0e838f2a4041968f361f01433fbe789b4c65fc0d26ce48 not found: ID does not exist" containerID="771b4bea890b0dbb9c0e838f2a4041968f361f01433fbe789b4c65fc0d26ce48" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.542746 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"771b4bea890b0dbb9c0e838f2a4041968f361f01433fbe789b4c65fc0d26ce48"} err="failed to get container status \"771b4bea890b0dbb9c0e838f2a4041968f361f01433fbe789b4c65fc0d26ce48\": rpc error: code = NotFound desc = could not find container \"771b4bea890b0dbb9c0e838f2a4041968f361f01433fbe789b4c65fc0d26ce48\": container with ID starting with 771b4bea890b0dbb9c0e838f2a4041968f361f01433fbe789b4c65fc0d26ce48 not found: ID does not exist" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.542757 4956 scope.go:117] "RemoveContainer" containerID="6cd81b71ea21b926b16de87485bb007c2d3967bc12fcbabd00d34e3ee3bde59c" Dec 11 22:09:07 crc kubenswrapper[4956]: E1211 22:09:07.543056 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6cd81b71ea21b926b16de87485bb007c2d3967bc12fcbabd00d34e3ee3bde59c\": container with ID starting with 6cd81b71ea21b926b16de87485bb007c2d3967bc12fcbabd00d34e3ee3bde59c not found: ID does not exist" containerID="6cd81b71ea21b926b16de87485bb007c2d3967bc12fcbabd00d34e3ee3bde59c" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.543077 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6cd81b71ea21b926b16de87485bb007c2d3967bc12fcbabd00d34e3ee3bde59c"} err="failed to get container status \"6cd81b71ea21b926b16de87485bb007c2d3967bc12fcbabd00d34e3ee3bde59c\": rpc error: code = NotFound desc = could not find container \"6cd81b71ea21b926b16de87485bb007c2d3967bc12fcbabd00d34e3ee3bde59c\": container with ID starting with 6cd81b71ea21b926b16de87485bb007c2d3967bc12fcbabd00d34e3ee3bde59c not found: ID does not exist" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.543089 4956 scope.go:117] "RemoveContainer" containerID="9f04271319e47c3c4d91a1f09a641162e56b39afe178688bc0a1c70ba7f240e0" Dec 11 22:09:07 crc kubenswrapper[4956]: E1211 22:09:07.543403 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f04271319e47c3c4d91a1f09a641162e56b39afe178688bc0a1c70ba7f240e0\": container with ID starting with 9f04271319e47c3c4d91a1f09a641162e56b39afe178688bc0a1c70ba7f240e0 not found: ID does not exist" containerID="9f04271319e47c3c4d91a1f09a641162e56b39afe178688bc0a1c70ba7f240e0" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.543422 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f04271319e47c3c4d91a1f09a641162e56b39afe178688bc0a1c70ba7f240e0"} err="failed to get container status \"9f04271319e47c3c4d91a1f09a641162e56b39afe178688bc0a1c70ba7f240e0\": rpc error: code = NotFound desc = could not find container \"9f04271319e47c3c4d91a1f09a641162e56b39afe178688bc0a1c70ba7f240e0\": container with ID starting with 9f04271319e47c3c4d91a1f09a641162e56b39afe178688bc0a1c70ba7f240e0 not found: ID does not exist" Dec 11 22:09:07 crc kubenswrapper[4956]: 
I1211 22:09:07.543435 4956 scope.go:117] "RemoveContainer" containerID="72447e0c769b939455f456831547067dc18164cedbeb060390ebcf30009c56f2" Dec 11 22:09:07 crc kubenswrapper[4956]: E1211 22:09:07.543701 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72447e0c769b939455f456831547067dc18164cedbeb060390ebcf30009c56f2\": container with ID starting with 72447e0c769b939455f456831547067dc18164cedbeb060390ebcf30009c56f2 not found: ID does not exist" containerID="72447e0c769b939455f456831547067dc18164cedbeb060390ebcf30009c56f2" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.543721 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72447e0c769b939455f456831547067dc18164cedbeb060390ebcf30009c56f2"} err="failed to get container status \"72447e0c769b939455f456831547067dc18164cedbeb060390ebcf30009c56f2\": rpc error: code = NotFound desc = could not find container \"72447e0c769b939455f456831547067dc18164cedbeb060390ebcf30009c56f2\": container with ID starting with 72447e0c769b939455f456831547067dc18164cedbeb060390ebcf30009c56f2 not found: ID does not exist" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.543734 4956 scope.go:117] "RemoveContainer" containerID="0309652fc5f357df3b54b72b8a66e1c9d2c233b4ee1a56a15051304572a14de3" Dec 11 22:09:07 crc kubenswrapper[4956]: E1211 22:09:07.544074 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0309652fc5f357df3b54b72b8a66e1c9d2c233b4ee1a56a15051304572a14de3\": container with ID starting with 0309652fc5f357df3b54b72b8a66e1c9d2c233b4ee1a56a15051304572a14de3 not found: ID does not exist" containerID="0309652fc5f357df3b54b72b8a66e1c9d2c233b4ee1a56a15051304572a14de3" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.544097 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0309652fc5f357df3b54b72b8a66e1c9d2c233b4ee1a56a15051304572a14de3"} err="failed to get container status \"0309652fc5f357df3b54b72b8a66e1c9d2c233b4ee1a56a15051304572a14de3\": rpc error: code = NotFound desc = could not find container \"0309652fc5f357df3b54b72b8a66e1c9d2c233b4ee1a56a15051304572a14de3\": container with ID starting with 0309652fc5f357df3b54b72b8a66e1c9d2c233b4ee1a56a15051304572a14de3 not found: ID does not exist" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.544109 4956 scope.go:117] "RemoveContainer" containerID="6bbb8038c04c11e53b08f438dcff124a7afe25a3f3009090b7c8030a0a927300" Dec 11 22:09:07 crc kubenswrapper[4956]: E1211 22:09:07.544600 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6bbb8038c04c11e53b08f438dcff124a7afe25a3f3009090b7c8030a0a927300\": container with ID starting with 6bbb8038c04c11e53b08f438dcff124a7afe25a3f3009090b7c8030a0a927300 not found: ID does not exist" containerID="6bbb8038c04c11e53b08f438dcff124a7afe25a3f3009090b7c8030a0a927300" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.544622 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bbb8038c04c11e53b08f438dcff124a7afe25a3f3009090b7c8030a0a927300"} err="failed to get container status \"6bbb8038c04c11e53b08f438dcff124a7afe25a3f3009090b7c8030a0a927300\": rpc error: code = NotFound desc = could not find container \"6bbb8038c04c11e53b08f438dcff124a7afe25a3f3009090b7c8030a0a927300\": container 
Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.544622 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bbb8038c04c11e53b08f438dcff124a7afe25a3f3009090b7c8030a0a927300"} err="failed to get container status \"6bbb8038c04c11e53b08f438dcff124a7afe25a3f3009090b7c8030a0a927300\": rpc error: code = NotFound desc = could not find container \"6bbb8038c04c11e53b08f438dcff124a7afe25a3f3009090b7c8030a0a927300\": container with ID starting with 6bbb8038c04c11e53b08f438dcff124a7afe25a3f3009090b7c8030a0a927300 not found: ID does not exist"
Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.544641 4956 scope.go:117] "RemoveContainer" containerID="01afedf1ad2cfb2fe5ede90de33b275c23506950b6517d31da319c73ce03de60"
Dec 11 22:09:07 crc kubenswrapper[4956]: E1211 22:09:07.544905 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01afedf1ad2cfb2fe5ede90de33b275c23506950b6517d31da319c73ce03de60\": container with ID starting with 01afedf1ad2cfb2fe5ede90de33b275c23506950b6517d31da319c73ce03de60 not found: ID does not exist" containerID="01afedf1ad2cfb2fe5ede90de33b275c23506950b6517d31da319c73ce03de60"
Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.544927 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01afedf1ad2cfb2fe5ede90de33b275c23506950b6517d31da319c73ce03de60"} err="failed to get container status \"01afedf1ad2cfb2fe5ede90de33b275c23506950b6517d31da319c73ce03de60\": rpc error: code = NotFound desc = could not find container \"01afedf1ad2cfb2fe5ede90de33b275c23506950b6517d31da319c73ce03de60\": container with ID starting with 01afedf1ad2cfb2fe5ede90de33b275c23506950b6517d31da319c73ce03de60 not found: ID does not exist"
Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.544940 4956 scope.go:117] "RemoveContainer" containerID="f4b63f70057b4692f3d70ec7b615692707eb7d542fd26618e90be529876ee66b"
Dec 11 22:09:07 crc kubenswrapper[4956]: E1211 22:09:07.545352 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4b63f70057b4692f3d70ec7b615692707eb7d542fd26618e90be529876ee66b\": container with ID starting with f4b63f70057b4692f3d70ec7b615692707eb7d542fd26618e90be529876ee66b not found: ID does not exist" containerID="f4b63f70057b4692f3d70ec7b615692707eb7d542fd26618e90be529876ee66b"
Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.545375 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4b63f70057b4692f3d70ec7b615692707eb7d542fd26618e90be529876ee66b"} err="failed to get container status \"f4b63f70057b4692f3d70ec7b615692707eb7d542fd26618e90be529876ee66b\": rpc error: code = NotFound desc = could not find container \"f4b63f70057b4692f3d70ec7b615692707eb7d542fd26618e90be529876ee66b\": container with ID starting with f4b63f70057b4692f3d70ec7b615692707eb7d542fd26618e90be529876ee66b not found: ID does not exist"
Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.545387 4956 scope.go:117] "RemoveContainer" containerID="1e6d56fd50a4321561ddb2408995bd963ef98e87ccf27adaf9ab6757e7eef72b"
Dec 11 22:09:07 crc kubenswrapper[4956]: E1211 22:09:07.545650 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e6d56fd50a4321561ddb2408995bd963ef98e87ccf27adaf9ab6757e7eef72b\": container with ID starting with 1e6d56fd50a4321561ddb2408995bd963ef98e87ccf27adaf9ab6757e7eef72b not found: ID does not exist" containerID="1e6d56fd50a4321561ddb2408995bd963ef98e87ccf27adaf9ab6757e7eef72b"
\"1e6d56fd50a4321561ddb2408995bd963ef98e87ccf27adaf9ab6757e7eef72b\": rpc error: code = NotFound desc = could not find container \"1e6d56fd50a4321561ddb2408995bd963ef98e87ccf27adaf9ab6757e7eef72b\": container with ID starting with 1e6d56fd50a4321561ddb2408995bd963ef98e87ccf27adaf9ab6757e7eef72b not found: ID does not exist" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.545685 4956 scope.go:117] "RemoveContainer" containerID="f1d46d139c19bce7d4f122fa6e17d51f3b7304f2d735d5649d973e4333e10d74" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.574503 4956 scope.go:117] "RemoveContainer" containerID="fba3b880188be2c5242c9e67acd79ea380838385f3cd14d4847817fbf375c7a7" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.605046 4956 scope.go:117] "RemoveContainer" containerID="03f3cc627480f8b066b74efbbd184ebcd415769b99dd460d8995c199560c97d5" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.627315 4956 scope.go:117] "RemoveContainer" containerID="3274a782fa544ad8c1816d0d0440d068cddba402e63527e803cda3b1edb031b0" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.644143 4956 scope.go:117] "RemoveContainer" containerID="85861c8a818f17c968073a0311f6ea4844080a4856017d4b623cc27d3d844bf8" Dec 11 22:09:07 crc kubenswrapper[4956]: I1211 22:09:07.662128 4956 scope.go:117] "RemoveContainer" containerID="11d07ad78fd327b49162b3c6e2a87af5fe34aed448f5005225d89ab2a00714ed" Dec 11 22:09:08 crc kubenswrapper[4956]: I1211 22:09:08.030897 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" path="/var/lib/kubelet/pods/41ffa609-e4fd-42af-b5bb-eeda0fca28be/volumes" Dec 11 22:09:08 crc kubenswrapper[4956]: I1211 22:09:08.033545 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" path="/var/lib/kubelet/pods/6e5ed4e3-d63b-4cd8-b886-483ca0abc75e/volumes" Dec 11 22:09:08 crc kubenswrapper[4956]: I1211 22:09:08.036196 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" path="/var/lib/kubelet/pods/a114c533-fe14-41f3-b4fc-6431a48cdfc9/volumes" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.970464 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.970998 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="account-server" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971010 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="account-server" Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971019 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="account-reaper" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971025 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="account-reaper" Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971038 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="object-replicator" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971046 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="object-replicator" Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971055 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="object-updater"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971060 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="object-updater"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971068 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="object-replicator"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971074 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="object-replicator"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971085 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="object-expirer"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971090 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="object-expirer"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971097 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="container-server"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971103 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="container-server"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971109 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="container-updater"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971115 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="container-updater"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971121 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="object-auditor"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971126 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="object-auditor"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971137 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4032c9d-3388-41cc-b413-63b2a1610c01" containerName="swift-ring-rebalance"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971143 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4032c9d-3388-41cc-b413-63b2a1610c01" containerName="swift-ring-rebalance"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971151 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="account-server"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971157 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="account-server"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971165 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="account-replicator"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971171 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="account-replicator"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971179 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="container-updater"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971186 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="container-updater"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971194 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="rsync"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971200 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="rsync"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971207 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="rsync"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971217 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="rsync"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971227 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="account-replicator"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971232 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="account-replicator"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971248 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="object-auditor"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971256 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="object-auditor"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971265 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="object-server"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971270 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="object-server"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971279 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="account-auditor"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971285 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="account-auditor"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971296 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="object-updater"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971303 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="object-updater"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971311 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="account-reaper"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971318 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="account-reaper"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971327 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="container-updater"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971335 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="container-updater"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971349 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="container-auditor"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971357 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="container-auditor"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971371 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="container-replicator"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971379 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="container-replicator"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971390 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="account-auditor"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971397 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="account-auditor"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971407 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="container-replicator"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971413 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="container-replicator"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971421 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="container-replicator"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971427 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="container-replicator"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971435 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="account-replicator"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971442 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="account-replicator"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971450 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="container-auditor"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971455 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="container-auditor"
Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971465 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="account-auditor"
podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="account-auditor" Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971477 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="account-reaper" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971482 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="account-reaper" Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971490 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="container-server" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971495 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="container-server" Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971501 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="object-server" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971506 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="object-server" Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971514 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="object-server" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971519 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="object-server" Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971528 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="container-auditor" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971533 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="container-auditor" Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971542 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e222ed6-506d-4466-85c0-6e6354f42d68" containerName="proxy-server" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971548 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e222ed6-506d-4466-85c0-6e6354f42d68" containerName="proxy-server" Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971557 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="object-auditor" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971563 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="object-auditor" Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971572 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="object-expirer" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971578 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="object-expirer" Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971587 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="rsync" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971593 4956 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="rsync" Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971601 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="object-updater" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971606 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="object-updater" Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971615 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="object-expirer" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971620 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="object-expirer" Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971629 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e222ed6-506d-4466-85c0-6e6354f42d68" containerName="proxy-httpd" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971636 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e222ed6-506d-4466-85c0-6e6354f42d68" containerName="proxy-httpd" Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971647 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="object-replicator" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971653 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="object-replicator" Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971663 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="account-server" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971671 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="account-server" Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971683 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="swift-recon-cron" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971691 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="swift-recon-cron" Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971702 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="container-server" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971709 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="container-server" Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971719 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="swift-recon-cron" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971724 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="swift-recon-cron" Dec 11 22:09:09 crc kubenswrapper[4956]: E1211 22:09:09.971731 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="swift-recon-cron" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971737 4956 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="swift-recon-cron" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971872 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="container-server" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971884 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="container-updater" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971892 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4032c9d-3388-41cc-b413-63b2a1610c01" containerName="swift-ring-rebalance" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971903 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="rsync" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971912 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="swift-recon-cron" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971919 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="container-server" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971930 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="container-updater" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971937 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="container-auditor" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971944 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="object-updater" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971953 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="account-reaper" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971961 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="object-replicator" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971967 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="account-replicator" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971976 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="object-updater" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971982 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="container-auditor" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971988 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="container-auditor" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.971996 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="account-reaper" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972001 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="container-server" 
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972010 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="account-auditor"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972016 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="account-replicator"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972023 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="account-reaper"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972031 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="account-auditor"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972037 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="account-server"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972045 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="account-server"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972051 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="object-replicator"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972058 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="object-expirer"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972063 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="swift-recon-cron"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972070 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="account-server"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972076 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="object-auditor"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972084 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e222ed6-506d-4466-85c0-6e6354f42d68" containerName="proxy-server"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972092 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="object-replicator"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972099 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="swift-recon-cron"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972105 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="container-replicator"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972111 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="account-replicator"
Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972119 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="container-replicator"
"RemoveStaleState removing state" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="container-replicator" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972133 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="object-server" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972141 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="object-expirer" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972149 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="object-server" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972156 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e222ed6-506d-4466-85c0-6e6354f42d68" containerName="proxy-httpd" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972164 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="rsync" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972172 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="rsync" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972180 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="a114c533-fe14-41f3-b4fc-6431a48cdfc9" containerName="object-expirer" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972186 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="container-updater" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972193 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="object-updater" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972201 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="object-server" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972207 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="object-auditor" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972213 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e5ed4e3-d63b-4cd8-b886-483ca0abc75e" containerName="object-auditor" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.972221 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="41ffa609-e4fd-42af-b5bb-eeda0fca28be" containerName="account-auditor" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.975952 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.978165 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-conf" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.978226 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-storage-config-data" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.978367 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-files" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.978805 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-swift-dockercfg-dmktp" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.991858 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.996347 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-etc-swift\") pod \"swift-storage-0\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.996404 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-cache\") pod \"swift-storage-0\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.996493 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ffwk\" (UniqueName: \"kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-kube-api-access-7ffwk\") pod \"swift-storage-0\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.996530 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"swift-storage-0\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:09:09 crc kubenswrapper[4956]: I1211 22:09:09.996589 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-lock\") pod \"swift-storage-0\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:09:10 crc kubenswrapper[4956]: I1211 22:09:10.098270 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-lock\") pod \"swift-storage-0\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:09:10 crc kubenswrapper[4956]: I1211 22:09:10.098339 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-etc-swift\") pod \"swift-storage-0\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:09:10 crc 
Dec 11 22:09:10 crc kubenswrapper[4956]: I1211 22:09:10.098383 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-cache\") pod \"swift-storage-0\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 11 22:09:10 crc kubenswrapper[4956]: I1211 22:09:10.098433 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ffwk\" (UniqueName: \"kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-kube-api-access-7ffwk\") pod \"swift-storage-0\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 11 22:09:10 crc kubenswrapper[4956]: I1211 22:09:10.098464 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"swift-storage-0\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 11 22:09:10 crc kubenswrapper[4956]: E1211 22:09:10.098706 4956 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Dec 11 22:09:10 crc kubenswrapper[4956]: E1211 22:09:10.098726 4956 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found
Dec 11 22:09:10 crc kubenswrapper[4956]: E1211 22:09:10.098793 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-etc-swift podName:1844e361-ee35-4c2f-8bc6-8ddd5ada5445 nodeName:}" failed. No retries permitted until 2025-12-11 22:09:10.598755226 +0000 UTC m=+1243.043133376 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-etc-swift") pod "swift-storage-0" (UID: "1844e361-ee35-4c2f-8bc6-8ddd5ada5445") : configmap "swift-ring-files" not found
Dec 11 22:09:10 crc kubenswrapper[4956]: I1211 22:09:10.099117 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-cache\") pod \"swift-storage-0\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 11 22:09:10 crc kubenswrapper[4956]: I1211 22:09:10.099120 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-lock\") pod \"swift-storage-0\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 11 22:09:10 crc kubenswrapper[4956]: I1211 22:09:10.099218 4956 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"swift-storage-0\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") device mount path \"/mnt/openstack/pv08\"" pod="swift-kuttl-tests/swift-storage-0"
Dec 11 22:09:10 crc kubenswrapper[4956]: I1211 22:09:10.116653 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ffwk\" (UniqueName: \"kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-kube-api-access-7ffwk\") pod \"swift-storage-0\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 11 22:09:10 crc kubenswrapper[4956]: I1211 22:09:10.118402 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"swift-storage-0\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 11 22:09:10 crc kubenswrapper[4956]: I1211 22:09:10.606493 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-etc-swift\") pod \"swift-storage-0\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 11 22:09:10 crc kubenswrapper[4956]: E1211 22:09:10.606747 4956 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Dec 11 22:09:10 crc kubenswrapper[4956]: E1211 22:09:10.606795 4956 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found
Dec 11 22:09:10 crc kubenswrapper[4956]: E1211 22:09:10.606861 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-etc-swift podName:1844e361-ee35-4c2f-8bc6-8ddd5ada5445 nodeName:}" failed. No retries permitted until 2025-12-11 22:09:11.606839419 +0000 UTC m=+1244.051217569 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-etc-swift") pod "swift-storage-0" (UID: "1844e361-ee35-4c2f-8bc6-8ddd5ada5445") : configmap "swift-ring-files" not found
Dec 11 22:09:11 crc kubenswrapper[4956]: I1211 22:09:11.621346 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-etc-swift\") pod \"swift-storage-0\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 11 22:09:11 crc kubenswrapper[4956]: E1211 22:09:11.621534 4956 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Dec 11 22:09:11 crc kubenswrapper[4956]: E1211 22:09:11.621565 4956 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found
Dec 11 22:09:11 crc kubenswrapper[4956]: E1211 22:09:11.621616 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-etc-swift podName:1844e361-ee35-4c2f-8bc6-8ddd5ada5445 nodeName:}" failed. No retries permitted until 2025-12-11 22:09:13.621598327 +0000 UTC m=+1246.065976477 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-etc-swift") pod "swift-storage-0" (UID: "1844e361-ee35-4c2f-8bc6-8ddd5ada5445") : configmap "swift-ring-files" not found
Dec 11 22:09:13 crc kubenswrapper[4956]: I1211 22:09:13.650349 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-etc-swift\") pod \"swift-storage-0\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 11 22:09:13 crc kubenswrapper[4956]: E1211 22:09:13.650566 4956 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Dec 11 22:09:13 crc kubenswrapper[4956]: E1211 22:09:13.650590 4956 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found
Dec 11 22:09:13 crc kubenswrapper[4956]: E1211 22:09:13.650646 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-etc-swift podName:1844e361-ee35-4c2f-8bc6-8ddd5ada5445 nodeName:}" failed. No retries permitted until 2025-12-11 22:09:17.650628332 +0000 UTC m=+1250.095006482 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-etc-swift") pod "swift-storage-0" (UID: "1844e361-ee35-4c2f-8bc6-8ddd5ada5445") : configmap "swift-ring-files" not found
Dec 11 22:09:13 crc kubenswrapper[4956]: I1211 22:09:13.901808 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-6jqn5"]
Dec 11 22:09:13 crc kubenswrapper[4956]: I1211 22:09:13.902881 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-6jqn5"
Dec 11 22:09:13 crc kubenswrapper[4956]: I1211 22:09:13.905799 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data"
Dec 11 22:09:13 crc kubenswrapper[4956]: I1211 22:09:13.905989 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-proxy-config-data"
Dec 11 22:09:13 crc kubenswrapper[4956]: I1211 22:09:13.909609 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts"
Dec 11 22:09:13 crc kubenswrapper[4956]: I1211 22:09:13.930849 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-6jqn5"]
Dec 11 22:09:13 crc kubenswrapper[4956]: E1211 22:09:13.931693 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[dispersionconf etc-swift kube-api-access-nvfhc ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[dispersionconf etc-swift kube-api-access-nvfhc ring-data-devices scripts swiftconf]: context canceled" pod="swift-kuttl-tests/swift-ring-rebalance-6jqn5" podUID="9953ecb0-f66f-4ad6-b671-2d70d90a34f5"
Dec 11 22:09:13 crc kubenswrapper[4956]: I1211 22:09:13.941491 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-s98dt"]
Dec 11 22:09:13 crc kubenswrapper[4956]: I1211 22:09:13.942718 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-s98dt"
Dec 11 22:09:13 crc kubenswrapper[4956]: I1211 22:09:13.957060 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-ring-data-devices\") pod \"swift-ring-rebalance-6jqn5\" (UID: \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\") " pod="swift-kuttl-tests/swift-ring-rebalance-6jqn5"
Dec 11 22:09:13 crc kubenswrapper[4956]: I1211 22:09:13.957202 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-scripts\") pod \"swift-ring-rebalance-6jqn5\" (UID: \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\") " pod="swift-kuttl-tests/swift-ring-rebalance-6jqn5"
Dec 11 22:09:13 crc kubenswrapper[4956]: I1211 22:09:13.957530 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvfhc\" (UniqueName: \"kubernetes.io/projected/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-kube-api-access-nvfhc\") pod \"swift-ring-rebalance-6jqn5\" (UID: \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\") " pod="swift-kuttl-tests/swift-ring-rebalance-6jqn5"
Dec 11 22:09:13 crc kubenswrapper[4956]: I1211 22:09:13.957586 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-dispersionconf\") pod \"swift-ring-rebalance-6jqn5\" (UID: \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\") " pod="swift-kuttl-tests/swift-ring-rebalance-6jqn5"
\"swift-ring-rebalance-6jqn5\" (UID: \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\") " pod="swift-kuttl-tests/swift-ring-rebalance-6jqn5" Dec 11 22:09:13 crc kubenswrapper[4956]: I1211 22:09:13.957698 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-swiftconf\") pod \"swift-ring-rebalance-6jqn5\" (UID: \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\") " pod="swift-kuttl-tests/swift-ring-rebalance-6jqn5" Dec 11 22:09:13 crc kubenswrapper[4956]: I1211 22:09:13.982822 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-s98dt"] Dec 11 22:09:13 crc kubenswrapper[4956]: I1211 22:09:13.989532 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-6jqn5"] Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.023217 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-6jqn5" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.031457 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-6jqn5" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.058826 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-scripts\") pod \"swift-ring-rebalance-6jqn5\" (UID: \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\") " pod="swift-kuttl-tests/swift-ring-rebalance-6jqn5" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.058885 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvfhc\" (UniqueName: \"kubernetes.io/projected/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-kube-api-access-nvfhc\") pod \"swift-ring-rebalance-6jqn5\" (UID: \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\") " pod="swift-kuttl-tests/swift-ring-rebalance-6jqn5" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.058925 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66c28ddd-265d-4b2b-becd-450ee962da58-scripts\") pod \"swift-ring-rebalance-s98dt\" (UID: \"66c28ddd-265d-4b2b-becd-450ee962da58\") " pod="swift-kuttl-tests/swift-ring-rebalance-s98dt" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.058946 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-dispersionconf\") pod \"swift-ring-rebalance-6jqn5\" (UID: \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\") " pod="swift-kuttl-tests/swift-ring-rebalance-6jqn5" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.058985 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-etc-swift\") pod \"swift-ring-rebalance-6jqn5\" (UID: \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\") " pod="swift-kuttl-tests/swift-ring-rebalance-6jqn5" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.059023 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-swiftconf\") pod \"swift-ring-rebalance-6jqn5\" (UID: \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\") " 
pod="swift-kuttl-tests/swift-ring-rebalance-6jqn5" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.059047 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/66c28ddd-265d-4b2b-becd-450ee962da58-dispersionconf\") pod \"swift-ring-rebalance-s98dt\" (UID: \"66c28ddd-265d-4b2b-becd-450ee962da58\") " pod="swift-kuttl-tests/swift-ring-rebalance-s98dt" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.059073 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkkhn\" (UniqueName: \"kubernetes.io/projected/66c28ddd-265d-4b2b-becd-450ee962da58-kube-api-access-qkkhn\") pod \"swift-ring-rebalance-s98dt\" (UID: \"66c28ddd-265d-4b2b-becd-450ee962da58\") " pod="swift-kuttl-tests/swift-ring-rebalance-s98dt" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.059097 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/66c28ddd-265d-4b2b-becd-450ee962da58-ring-data-devices\") pod \"swift-ring-rebalance-s98dt\" (UID: \"66c28ddd-265d-4b2b-becd-450ee962da58\") " pod="swift-kuttl-tests/swift-ring-rebalance-s98dt" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.059134 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-ring-data-devices\") pod \"swift-ring-rebalance-6jqn5\" (UID: \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\") " pod="swift-kuttl-tests/swift-ring-rebalance-6jqn5" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.059155 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/66c28ddd-265d-4b2b-becd-450ee962da58-etc-swift\") pod \"swift-ring-rebalance-s98dt\" (UID: \"66c28ddd-265d-4b2b-becd-450ee962da58\") " pod="swift-kuttl-tests/swift-ring-rebalance-s98dt" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.059179 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/66c28ddd-265d-4b2b-becd-450ee962da58-swiftconf\") pod \"swift-ring-rebalance-s98dt\" (UID: \"66c28ddd-265d-4b2b-becd-450ee962da58\") " pod="swift-kuttl-tests/swift-ring-rebalance-s98dt" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.059340 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-etc-swift\") pod \"swift-ring-rebalance-6jqn5\" (UID: \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\") " pod="swift-kuttl-tests/swift-ring-rebalance-6jqn5" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.059802 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-scripts\") pod \"swift-ring-rebalance-6jqn5\" (UID: \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\") " pod="swift-kuttl-tests/swift-ring-rebalance-6jqn5" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.060536 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-ring-data-devices\") pod \"swift-ring-rebalance-6jqn5\" (UID: 
\"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\") " pod="swift-kuttl-tests/swift-ring-rebalance-6jqn5" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.064955 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-dispersionconf\") pod \"swift-ring-rebalance-6jqn5\" (UID: \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\") " pod="swift-kuttl-tests/swift-ring-rebalance-6jqn5" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.065228 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-swiftconf\") pod \"swift-ring-rebalance-6jqn5\" (UID: \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\") " pod="swift-kuttl-tests/swift-ring-rebalance-6jqn5" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.075273 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvfhc\" (UniqueName: \"kubernetes.io/projected/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-kube-api-access-nvfhc\") pod \"swift-ring-rebalance-6jqn5\" (UID: \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\") " pod="swift-kuttl-tests/swift-ring-rebalance-6jqn5" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.159829 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-scripts\") pod \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\" (UID: \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\") " Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.159897 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nvfhc\" (UniqueName: \"kubernetes.io/projected/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-kube-api-access-nvfhc\") pod \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\" (UID: \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\") " Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.159940 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-ring-data-devices\") pod \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\" (UID: \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\") " Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.159973 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-etc-swift\") pod \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\" (UID: \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\") " Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.159999 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-dispersionconf\") pod \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\" (UID: \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\") " Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.160029 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-swiftconf\") pod \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\" (UID: \"9953ecb0-f66f-4ad6-b671-2d70d90a34f5\") " Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.160110 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: 
\"kubernetes.io/configmap/66c28ddd-265d-4b2b-becd-450ee962da58-ring-data-devices\") pod \"swift-ring-rebalance-s98dt\" (UID: \"66c28ddd-265d-4b2b-becd-450ee962da58\") " pod="swift-kuttl-tests/swift-ring-rebalance-s98dt" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.160141 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/66c28ddd-265d-4b2b-becd-450ee962da58-etc-swift\") pod \"swift-ring-rebalance-s98dt\" (UID: \"66c28ddd-265d-4b2b-becd-450ee962da58\") " pod="swift-kuttl-tests/swift-ring-rebalance-s98dt" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.160157 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/66c28ddd-265d-4b2b-becd-450ee962da58-swiftconf\") pod \"swift-ring-rebalance-s98dt\" (UID: \"66c28ddd-265d-4b2b-becd-450ee962da58\") " pod="swift-kuttl-tests/swift-ring-rebalance-s98dt" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.160230 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66c28ddd-265d-4b2b-becd-450ee962da58-scripts\") pod \"swift-ring-rebalance-s98dt\" (UID: \"66c28ddd-265d-4b2b-becd-450ee962da58\") " pod="swift-kuttl-tests/swift-ring-rebalance-s98dt" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.160274 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/66c28ddd-265d-4b2b-becd-450ee962da58-dispersionconf\") pod \"swift-ring-rebalance-s98dt\" (UID: \"66c28ddd-265d-4b2b-becd-450ee962da58\") " pod="swift-kuttl-tests/swift-ring-rebalance-s98dt" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.160292 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkkhn\" (UniqueName: \"kubernetes.io/projected/66c28ddd-265d-4b2b-becd-450ee962da58-kube-api-access-qkkhn\") pod \"swift-ring-rebalance-s98dt\" (UID: \"66c28ddd-265d-4b2b-becd-450ee962da58\") " pod="swift-kuttl-tests/swift-ring-rebalance-s98dt" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.160431 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "9953ecb0-f66f-4ad6-b671-2d70d90a34f5" (UID: "9953ecb0-f66f-4ad6-b671-2d70d90a34f5"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.160676 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "9953ecb0-f66f-4ad6-b671-2d70d90a34f5" (UID: "9953ecb0-f66f-4ad6-b671-2d70d90a34f5"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.160728 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-scripts" (OuterVolumeSpecName: "scripts") pod "9953ecb0-f66f-4ad6-b671-2d70d90a34f5" (UID: "9953ecb0-f66f-4ad6-b671-2d70d90a34f5"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.163609 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-kube-api-access-nvfhc" (OuterVolumeSpecName: "kube-api-access-nvfhc") pod "9953ecb0-f66f-4ad6-b671-2d70d90a34f5" (UID: "9953ecb0-f66f-4ad6-b671-2d70d90a34f5"). InnerVolumeSpecName "kube-api-access-nvfhc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.163955 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/66c28ddd-265d-4b2b-becd-450ee962da58-etc-swift\") pod \"swift-ring-rebalance-s98dt\" (UID: \"66c28ddd-265d-4b2b-becd-450ee962da58\") " pod="swift-kuttl-tests/swift-ring-rebalance-s98dt" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.165536 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/66c28ddd-265d-4b2b-becd-450ee962da58-ring-data-devices\") pod \"swift-ring-rebalance-s98dt\" (UID: \"66c28ddd-265d-4b2b-becd-450ee962da58\") " pod="swift-kuttl-tests/swift-ring-rebalance-s98dt" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.165587 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66c28ddd-265d-4b2b-becd-450ee962da58-scripts\") pod \"swift-ring-rebalance-s98dt\" (UID: \"66c28ddd-265d-4b2b-becd-450ee962da58\") " pod="swift-kuttl-tests/swift-ring-rebalance-s98dt" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.165735 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "9953ecb0-f66f-4ad6-b671-2d70d90a34f5" (UID: "9953ecb0-f66f-4ad6-b671-2d70d90a34f5"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.166338 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "9953ecb0-f66f-4ad6-b671-2d70d90a34f5" (UID: "9953ecb0-f66f-4ad6-b671-2d70d90a34f5"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.169918 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/66c28ddd-265d-4b2b-becd-450ee962da58-swiftconf\") pod \"swift-ring-rebalance-s98dt\" (UID: \"66c28ddd-265d-4b2b-becd-450ee962da58\") " pod="swift-kuttl-tests/swift-ring-rebalance-s98dt" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.171195 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/66c28ddd-265d-4b2b-becd-450ee962da58-dispersionconf\") pod \"swift-ring-rebalance-s98dt\" (UID: \"66c28ddd-265d-4b2b-becd-450ee962da58\") " pod="swift-kuttl-tests/swift-ring-rebalance-s98dt" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.181563 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkkhn\" (UniqueName: \"kubernetes.io/projected/66c28ddd-265d-4b2b-becd-450ee962da58-kube-api-access-qkkhn\") pod \"swift-ring-rebalance-s98dt\" (UID: \"66c28ddd-265d-4b2b-becd-450ee962da58\") " pod="swift-kuttl-tests/swift-ring-rebalance-s98dt" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.260826 4956 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.260862 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nvfhc\" (UniqueName: \"kubernetes.io/projected/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-kube-api-access-nvfhc\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.260873 4956 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.260881 4956 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.260890 4956 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.260899 4956 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/9953ecb0-f66f-4ad6-b671-2d70d90a34f5-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.288122 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-s98dt" Dec 11 22:09:14 crc kubenswrapper[4956]: I1211 22:09:14.702462 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-s98dt"] Dec 11 22:09:14 crc kubenswrapper[4956]: W1211 22:09:14.709871 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66c28ddd_265d_4b2b_becd_450ee962da58.slice/crio-4a2911c94a17bbf19e20516ffdab6bf585dbb59441206a118b636c35b25cfac5 WatchSource:0}: Error finding container 4a2911c94a17bbf19e20516ffdab6bf585dbb59441206a118b636c35b25cfac5: Status 404 returned error can't find the container with id 4a2911c94a17bbf19e20516ffdab6bf585dbb59441206a118b636c35b25cfac5 Dec 11 22:09:15 crc kubenswrapper[4956]: I1211 22:09:15.037854 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-6jqn5" Dec 11 22:09:15 crc kubenswrapper[4956]: I1211 22:09:15.037849 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-s98dt" event={"ID":"66c28ddd-265d-4b2b-becd-450ee962da58","Type":"ContainerStarted","Data":"b1074594adcc4a01dce7038a77f03a4c85373c8f92c8698a64bfc08926ae86b9"} Dec 11 22:09:15 crc kubenswrapper[4956]: I1211 22:09:15.038370 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-s98dt" event={"ID":"66c28ddd-265d-4b2b-becd-450ee962da58","Type":"ContainerStarted","Data":"4a2911c94a17bbf19e20516ffdab6bf585dbb59441206a118b636c35b25cfac5"} Dec 11 22:09:15 crc kubenswrapper[4956]: I1211 22:09:15.106211 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-s98dt" podStartSLOduration=2.106184201 podStartE2EDuration="2.106184201s" podCreationTimestamp="2025-12-11 22:09:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 22:09:15.056554147 +0000 UTC m=+1247.500932307" watchObservedRunningTime="2025-12-11 22:09:15.106184201 +0000 UTC m=+1247.550562351" Dec 11 22:09:15 crc kubenswrapper[4956]: I1211 22:09:15.122895 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-6jqn5"] Dec 11 22:09:15 crc kubenswrapper[4956]: I1211 22:09:15.138388 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-6jqn5"] Dec 11 22:09:16 crc kubenswrapper[4956]: I1211 22:09:16.030607 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9953ecb0-f66f-4ad6-b671-2d70d90a34f5" path="/var/lib/kubelet/pods/9953ecb0-f66f-4ad6-b671-2d70d90a34f5/volumes" Dec 11 22:09:16 crc kubenswrapper[4956]: I1211 22:09:16.888690 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 22:09:16 crc kubenswrapper[4956]: I1211 22:09:16.889179 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 22:09:17 crc 
kubenswrapper[4956]: I1211 22:09:17.661182 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-etc-swift\") pod \"swift-storage-0\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:09:17 crc kubenswrapper[4956]: E1211 22:09:17.661357 4956 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 11 22:09:17 crc kubenswrapper[4956]: E1211 22:09:17.661391 4956 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 11 22:09:17 crc kubenswrapper[4956]: E1211 22:09:17.661448 4956 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-etc-swift podName:1844e361-ee35-4c2f-8bc6-8ddd5ada5445 nodeName:}" failed. No retries permitted until 2025-12-11 22:09:25.661428969 +0000 UTC m=+1258.105807119 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-etc-swift") pod "swift-storage-0" (UID: "1844e361-ee35-4c2f-8bc6-8ddd5ada5445") : configmap "swift-ring-files" not found Dec 11 22:09:22 crc kubenswrapper[4956]: I1211 22:09:22.150426 4956 generic.go:334] "Generic (PLEG): container finished" podID="66c28ddd-265d-4b2b-becd-450ee962da58" containerID="b1074594adcc4a01dce7038a77f03a4c85373c8f92c8698a64bfc08926ae86b9" exitCode=0 Dec 11 22:09:22 crc kubenswrapper[4956]: I1211 22:09:22.150523 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-s98dt" event={"ID":"66c28ddd-265d-4b2b-becd-450ee962da58","Type":"ContainerDied","Data":"b1074594adcc4a01dce7038a77f03a4c85373c8f92c8698a64bfc08926ae86b9"} Dec 11 22:09:23 crc kubenswrapper[4956]: I1211 22:09:23.421292 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-s98dt" Dec 11 22:09:23 crc kubenswrapper[4956]: I1211 22:09:23.614026 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/66c28ddd-265d-4b2b-becd-450ee962da58-dispersionconf\") pod \"66c28ddd-265d-4b2b-becd-450ee962da58\" (UID: \"66c28ddd-265d-4b2b-becd-450ee962da58\") " Dec 11 22:09:23 crc kubenswrapper[4956]: I1211 22:09:23.614091 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/66c28ddd-265d-4b2b-becd-450ee962da58-etc-swift\") pod \"66c28ddd-265d-4b2b-becd-450ee962da58\" (UID: \"66c28ddd-265d-4b2b-becd-450ee962da58\") " Dec 11 22:09:23 crc kubenswrapper[4956]: I1211 22:09:23.614151 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkkhn\" (UniqueName: \"kubernetes.io/projected/66c28ddd-265d-4b2b-becd-450ee962da58-kube-api-access-qkkhn\") pod \"66c28ddd-265d-4b2b-becd-450ee962da58\" (UID: \"66c28ddd-265d-4b2b-becd-450ee962da58\") " Dec 11 22:09:23 crc kubenswrapper[4956]: I1211 22:09:23.614259 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66c28ddd-265d-4b2b-becd-450ee962da58-scripts\") pod \"66c28ddd-265d-4b2b-becd-450ee962da58\" (UID: \"66c28ddd-265d-4b2b-becd-450ee962da58\") " Dec 11 22:09:23 crc kubenswrapper[4956]: I1211 22:09:23.614302 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/66c28ddd-265d-4b2b-becd-450ee962da58-swiftconf\") pod \"66c28ddd-265d-4b2b-becd-450ee962da58\" (UID: \"66c28ddd-265d-4b2b-becd-450ee962da58\") " Dec 11 22:09:23 crc kubenswrapper[4956]: I1211 22:09:23.614416 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/66c28ddd-265d-4b2b-becd-450ee962da58-ring-data-devices\") pod \"66c28ddd-265d-4b2b-becd-450ee962da58\" (UID: \"66c28ddd-265d-4b2b-becd-450ee962da58\") " Dec 11 22:09:23 crc kubenswrapper[4956]: I1211 22:09:23.616042 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66c28ddd-265d-4b2b-becd-450ee962da58-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "66c28ddd-265d-4b2b-becd-450ee962da58" (UID: "66c28ddd-265d-4b2b-becd-450ee962da58"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:09:23 crc kubenswrapper[4956]: I1211 22:09:23.616057 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66c28ddd-265d-4b2b-becd-450ee962da58-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "66c28ddd-265d-4b2b-becd-450ee962da58" (UID: "66c28ddd-265d-4b2b-becd-450ee962da58"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 22:09:23 crc kubenswrapper[4956]: I1211 22:09:23.625419 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66c28ddd-265d-4b2b-becd-450ee962da58-kube-api-access-qkkhn" (OuterVolumeSpecName: "kube-api-access-qkkhn") pod "66c28ddd-265d-4b2b-becd-450ee962da58" (UID: "66c28ddd-265d-4b2b-becd-450ee962da58"). InnerVolumeSpecName "kube-api-access-qkkhn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:09:23 crc kubenswrapper[4956]: I1211 22:09:23.627972 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66c28ddd-265d-4b2b-becd-450ee962da58-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "66c28ddd-265d-4b2b-becd-450ee962da58" (UID: "66c28ddd-265d-4b2b-becd-450ee962da58"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 22:09:23 crc kubenswrapper[4956]: I1211 22:09:23.636944 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66c28ddd-265d-4b2b-becd-450ee962da58-scripts" (OuterVolumeSpecName: "scripts") pod "66c28ddd-265d-4b2b-becd-450ee962da58" (UID: "66c28ddd-265d-4b2b-becd-450ee962da58"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 22:09:23 crc kubenswrapper[4956]: I1211 22:09:23.645531 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66c28ddd-265d-4b2b-becd-450ee962da58-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "66c28ddd-265d-4b2b-becd-450ee962da58" (UID: "66c28ddd-265d-4b2b-becd-450ee962da58"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 22:09:23 crc kubenswrapper[4956]: I1211 22:09:23.716544 4956 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66c28ddd-265d-4b2b-becd-450ee962da58-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:23 crc kubenswrapper[4956]: I1211 22:09:23.716599 4956 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/66c28ddd-265d-4b2b-becd-450ee962da58-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:23 crc kubenswrapper[4956]: I1211 22:09:23.716619 4956 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/66c28ddd-265d-4b2b-becd-450ee962da58-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:23 crc kubenswrapper[4956]: I1211 22:09:23.716639 4956 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/66c28ddd-265d-4b2b-becd-450ee962da58-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:23 crc kubenswrapper[4956]: I1211 22:09:23.716658 4956 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/66c28ddd-265d-4b2b-becd-450ee962da58-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:23 crc kubenswrapper[4956]: I1211 22:09:23.716676 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkkhn\" (UniqueName: \"kubernetes.io/projected/66c28ddd-265d-4b2b-becd-450ee962da58-kube-api-access-qkkhn\") on node \"crc\" DevicePath \"\"" Dec 11 22:09:24 crc kubenswrapper[4956]: I1211 22:09:24.171323 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-s98dt" event={"ID":"66c28ddd-265d-4b2b-becd-450ee962da58","Type":"ContainerDied","Data":"4a2911c94a17bbf19e20516ffdab6bf585dbb59441206a118b636c35b25cfac5"} Dec 11 22:09:24 crc kubenswrapper[4956]: I1211 22:09:24.171384 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a2911c94a17bbf19e20516ffdab6bf585dbb59441206a118b636c35b25cfac5" Dec 11 22:09:24 crc kubenswrapper[4956]: I1211 22:09:24.171447 4956 util.go:48] "No ready 
Dec 11 22:09:25 crc kubenswrapper[4956]: I1211 22:09:25.747826 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-etc-swift\") pod \"swift-storage-0\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 11 22:09:25 crc kubenswrapper[4956]: I1211 22:09:25.757629 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-etc-swift\") pod \"swift-storage-0\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 11 22:09:25 crc kubenswrapper[4956]: I1211 22:09:25.897804 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0"
Dec 11 22:09:26 crc kubenswrapper[4956]: I1211 22:09:26.458784 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"]
Dec 11 22:09:27 crc kubenswrapper[4956]: I1211 22:09:27.210095 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerStarted","Data":"fa26039f3dc2afc660aeb61b4108bd16e9f7994d916f3b78a8c8b3206e17ffc6"}
Dec 11 22:09:27 crc kubenswrapper[4956]: I1211 22:09:27.210443 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerStarted","Data":"531915597e5bfc8894bd3c4cdbd292cc8c16328e98d860fae953763f937f355b"}
Dec 11 22:09:27 crc kubenswrapper[4956]: I1211 22:09:27.210460 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerStarted","Data":"03785274dd06bb71bd539ea0191ea87bffb303056e8ff08dcc27a86d6d20cde7"}
Dec 11 22:09:27 crc kubenswrapper[4956]: I1211 22:09:27.210471 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerStarted","Data":"f42b34da00640445d7f41f37a60789ee942e2b56e2f1d843f6f597c3bd680064"}
Dec 11 22:09:27 crc kubenswrapper[4956]: I1211 22:09:27.210481 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerStarted","Data":"bc4ce11530db4d7aa94b181d0ac5bbe41016bb36bc3b81f2ef8ed18300920c2f"}
Dec 11 22:09:27 crc kubenswrapper[4956]: I1211 22:09:27.210492 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerStarted","Data":"8845f0a0f103d536a83d8be26e4c90b585992e6e0bc0ba2b3bb943bac0ca2e24"}
Dec 11 22:09:28 crc kubenswrapper[4956]: I1211 22:09:28.306857 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerStarted","Data":"bfc1c3080b6a427dbb30b3c3e2a54714de087157c54e925845c077410dc06012"}
Dec 11 22:09:28 crc kubenswrapper[4956]: I1211 22:09:28.307201 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerStarted","Data":"6c529247cb79b86c8840972bdb2535e99c7acfb5ca83c4e7c419b12af81643bb"}
Dec 11 22:09:28 crc kubenswrapper[4956]: I1211 22:09:28.307214 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerStarted","Data":"0bf8628f6db21e853b47a9127d07107df97ff5cb36f70ff0f9053e9b4a29abbf"}
Dec 11 22:09:28 crc kubenswrapper[4956]: I1211 22:09:28.307226 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerStarted","Data":"2124c7e81a8406a24c2f606c393966b55e66c61c9149af1054091a92d6754f7e"}
Dec 11 22:09:28 crc kubenswrapper[4956]: I1211 22:09:28.307237 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerStarted","Data":"0df5dfbf514fc82c9b20389bee8122a1b54f5466d75e11570c5295139ccae9e6"}
Dec 11 22:09:28 crc kubenswrapper[4956]: I1211 22:09:28.307247 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerStarted","Data":"5240ec284acdffff31ad3df9116325845d3f936d9b94892edd27480f7eda5d43"}
Dec 11 22:09:28 crc kubenswrapper[4956]: I1211 22:09:28.307257 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerStarted","Data":"31b7e3d8beed14cdcd332ad70f7932dac013362e417886c5989aa65e797ccd34"}
Dec 11 22:09:29 crc kubenswrapper[4956]: I1211 22:09:29.319139 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerStarted","Data":"a0e4baf33050ecc4d04e84bbd15bea1f4382c51edfca0492260184b2d36edb96"}
Dec 11 22:09:29 crc kubenswrapper[4956]: I1211 22:09:29.319459 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerStarted","Data":"46ca53a46b51666e802ebdb0db610c33c39710bab87681373f308a5de47cd1cc"}
Dec 11 22:09:29 crc kubenswrapper[4956]: I1211 22:09:29.319470 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerStarted","Data":"818eb5d4ca9845544fd20579d0975240b833028bfcf4cf6cb2f54eddf77d08ff"}
Dec 11 22:09:29 crc kubenswrapper[4956]: I1211 22:09:29.319480 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerStarted","Data":"e0eb0453deda9b30858bd0dafd5d97dacda1037dc916bbee7ba68709bd4cce5e"}
Dec 11 22:09:29 crc kubenswrapper[4956]: I1211 22:09:29.365150 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-storage-0" podStartSLOduration=21.36513253 podStartE2EDuration="21.36513253s" podCreationTimestamp="2025-12-11 22:09:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 22:09:29.361389178 +0000 UTC m=+1261.805767328" watchObservedRunningTime="2025-12-11 22:09:29.36513253 +0000 UTC m=+1261.809510680"
Dec 11 22:09:46 crc kubenswrapper[4956]: I1211 22:09:46.888841 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 11 22:09:46 crc kubenswrapper[4956]: I1211 22:09:46.889611 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 11 22:09:46 crc kubenswrapper[4956]: I1211 22:09:46.889680 4956 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2"
Dec 11 22:09:46 crc kubenswrapper[4956]: I1211 22:09:46.890691 4956 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c6c779569135f522c95dbbde5c790dc03324f364d3461dab8d0acf4bad2ce223"} pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 11 22:09:46 crc kubenswrapper[4956]: I1211 22:09:46.890852 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" containerID="cri-o://c6c779569135f522c95dbbde5c790dc03324f364d3461dab8d0acf4bad2ce223" gracePeriod=600
Dec 11 22:09:47 crc kubenswrapper[4956]: I1211 22:09:47.460707 4956 generic.go:334] "Generic (PLEG): container finished" podID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerID="c6c779569135f522c95dbbde5c790dc03324f364d3461dab8d0acf4bad2ce223" exitCode=0
Dec 11 22:09:47 crc kubenswrapper[4956]: I1211 22:09:47.460888 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" event={"ID":"cf61c63b-b06c-4f51-add2-aefe57de751a","Type":"ContainerDied","Data":"c6c779569135f522c95dbbde5c790dc03324f364d3461dab8d0acf4bad2ce223"}
Dec 11 22:09:47 crc kubenswrapper[4956]: I1211 22:09:47.461275 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" event={"ID":"cf61c63b-b06c-4f51-add2-aefe57de751a","Type":"ContainerStarted","Data":"9fbcd49be89040ede2d7bbee715126cd5d47cc31e9a909c82551139fd0849303"}
Dec 11 22:09:47 crc kubenswrapper[4956]: I1211 22:09:47.461375 4956 scope.go:117] "RemoveContainer" containerID="16eb3ed064bf8b2d4bc79eb3f1d7745450b60887fc5c6da806964966eb18a92c"
Dec 11 22:12:16 crc kubenswrapper[4956]: I1211 22:12:16.888191 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 11 22:12:16 crc kubenswrapper[4956]: I1211 22:12:16.888866 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 11 22:12:46 crc kubenswrapper[4956]: I1211 22:12:46.888324 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 11 22:12:46 crc kubenswrapper[4956]: I1211 22:12:46.890555 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 11 22:13:05 crc kubenswrapper[4956]: I1211 22:13:05.945518 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jpvlt"]
Dec 11 22:13:05 crc kubenswrapper[4956]: E1211 22:13:05.946469 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66c28ddd-265d-4b2b-becd-450ee962da58" containerName="swift-ring-rebalance"
Dec 11 22:13:05 crc kubenswrapper[4956]: I1211 22:13:05.946484 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="66c28ddd-265d-4b2b-becd-450ee962da58" containerName="swift-ring-rebalance"
Dec 11 22:13:05 crc kubenswrapper[4956]: I1211 22:13:05.946640 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="66c28ddd-265d-4b2b-becd-450ee962da58" containerName="swift-ring-rebalance"
Dec 11 22:13:05 crc kubenswrapper[4956]: I1211 22:13:05.947795 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jpvlt"
Dec 11 22:13:05 crc kubenswrapper[4956]: I1211 22:13:05.967927 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jpvlt"]
Dec 11 22:13:06 crc kubenswrapper[4956]: I1211 22:13:06.018896 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed8914e2-78fe-4d03-a80c-721857c451b5-utilities\") pod \"community-operators-jpvlt\" (UID: \"ed8914e2-78fe-4d03-a80c-721857c451b5\") " pod="openshift-marketplace/community-operators-jpvlt"
Dec 11 22:13:06 crc kubenswrapper[4956]: I1211 22:13:06.018956 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kmg4\" (UniqueName: \"kubernetes.io/projected/ed8914e2-78fe-4d03-a80c-721857c451b5-kube-api-access-2kmg4\") pod \"community-operators-jpvlt\" (UID: \"ed8914e2-78fe-4d03-a80c-721857c451b5\") " pod="openshift-marketplace/community-operators-jpvlt"
Dec 11 22:13:06 crc kubenswrapper[4956]: I1211 22:13:06.019073 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed8914e2-78fe-4d03-a80c-721857c451b5-catalog-content\") pod \"community-operators-jpvlt\" (UID: \"ed8914e2-78fe-4d03-a80c-721857c451b5\") " pod="openshift-marketplace/community-operators-jpvlt"
Dec 11 22:13:06 crc kubenswrapper[4956]: I1211 22:13:06.120486 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed8914e2-78fe-4d03-a80c-721857c451b5-utilities\") pod \"community-operators-jpvlt\" (UID: \"ed8914e2-78fe-4d03-a80c-721857c451b5\") " pod="openshift-marketplace/community-operators-jpvlt"
pod="openshift-marketplace/community-operators-jpvlt" Dec 11 22:13:06 crc kubenswrapper[4956]: I1211 22:13:06.120547 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kmg4\" (UniqueName: \"kubernetes.io/projected/ed8914e2-78fe-4d03-a80c-721857c451b5-kube-api-access-2kmg4\") pod \"community-operators-jpvlt\" (UID: \"ed8914e2-78fe-4d03-a80c-721857c451b5\") " pod="openshift-marketplace/community-operators-jpvlt" Dec 11 22:13:06 crc kubenswrapper[4956]: I1211 22:13:06.120628 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed8914e2-78fe-4d03-a80c-721857c451b5-catalog-content\") pod \"community-operators-jpvlt\" (UID: \"ed8914e2-78fe-4d03-a80c-721857c451b5\") " pod="openshift-marketplace/community-operators-jpvlt" Dec 11 22:13:06 crc kubenswrapper[4956]: I1211 22:13:06.121094 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed8914e2-78fe-4d03-a80c-721857c451b5-utilities\") pod \"community-operators-jpvlt\" (UID: \"ed8914e2-78fe-4d03-a80c-721857c451b5\") " pod="openshift-marketplace/community-operators-jpvlt" Dec 11 22:13:06 crc kubenswrapper[4956]: I1211 22:13:06.121153 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed8914e2-78fe-4d03-a80c-721857c451b5-catalog-content\") pod \"community-operators-jpvlt\" (UID: \"ed8914e2-78fe-4d03-a80c-721857c451b5\") " pod="openshift-marketplace/community-operators-jpvlt" Dec 11 22:13:06 crc kubenswrapper[4956]: I1211 22:13:06.142348 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kmg4\" (UniqueName: \"kubernetes.io/projected/ed8914e2-78fe-4d03-a80c-721857c451b5-kube-api-access-2kmg4\") pod \"community-operators-jpvlt\" (UID: \"ed8914e2-78fe-4d03-a80c-721857c451b5\") " pod="openshift-marketplace/community-operators-jpvlt" Dec 11 22:13:06 crc kubenswrapper[4956]: I1211 22:13:06.273106 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jpvlt" Dec 11 22:13:06 crc kubenswrapper[4956]: I1211 22:13:06.777688 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jpvlt"] Dec 11 22:13:06 crc kubenswrapper[4956]: W1211 22:13:06.783756 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poded8914e2_78fe_4d03_a80c_721857c451b5.slice/crio-c5c17714f0184213a00cc6a89754a5dd795f7bc47f163eeb5d623efbe0a3c7a1 WatchSource:0}: Error finding container c5c17714f0184213a00cc6a89754a5dd795f7bc47f163eeb5d623efbe0a3c7a1: Status 404 returned error can't find the container with id c5c17714f0184213a00cc6a89754a5dd795f7bc47f163eeb5d623efbe0a3c7a1 Dec 11 22:13:07 crc kubenswrapper[4956]: I1211 22:13:07.287414 4956 generic.go:334] "Generic (PLEG): container finished" podID="ed8914e2-78fe-4d03-a80c-721857c451b5" containerID="0f311447a4ca70a0393a8ee96a103f5f48d9d76155f590270d1dc5cafac34a56" exitCode=0 Dec 11 22:13:07 crc kubenswrapper[4956]: I1211 22:13:07.287492 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jpvlt" event={"ID":"ed8914e2-78fe-4d03-a80c-721857c451b5","Type":"ContainerDied","Data":"0f311447a4ca70a0393a8ee96a103f5f48d9d76155f590270d1dc5cafac34a56"} Dec 11 22:13:07 crc kubenswrapper[4956]: I1211 22:13:07.287531 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jpvlt" event={"ID":"ed8914e2-78fe-4d03-a80c-721857c451b5","Type":"ContainerStarted","Data":"c5c17714f0184213a00cc6a89754a5dd795f7bc47f163eeb5d623efbe0a3c7a1"} Dec 11 22:13:07 crc kubenswrapper[4956]: I1211 22:13:07.290899 4956 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 22:13:08 crc kubenswrapper[4956]: I1211 22:13:08.298061 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jpvlt" event={"ID":"ed8914e2-78fe-4d03-a80c-721857c451b5","Type":"ContainerStarted","Data":"007c9e44731a104cb9c2abbf574e211757f561b63aa558c11f3c5d8151a846de"} Dec 11 22:13:09 crc kubenswrapper[4956]: I1211 22:13:09.328891 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jpvlt" event={"ID":"ed8914e2-78fe-4d03-a80c-721857c451b5","Type":"ContainerDied","Data":"007c9e44731a104cb9c2abbf574e211757f561b63aa558c11f3c5d8151a846de"} Dec 11 22:13:09 crc kubenswrapper[4956]: I1211 22:13:09.328763 4956 generic.go:334] "Generic (PLEG): container finished" podID="ed8914e2-78fe-4d03-a80c-721857c451b5" containerID="007c9e44731a104cb9c2abbf574e211757f561b63aa558c11f3c5d8151a846de" exitCode=0 Dec 11 22:13:10 crc kubenswrapper[4956]: I1211 22:13:10.340175 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jpvlt" event={"ID":"ed8914e2-78fe-4d03-a80c-721857c451b5","Type":"ContainerStarted","Data":"86cef842bf8a250e6f187eb194a2c6dcec4be761ff2bb7b7ae1d98f2f4bd6375"} Dec 11 22:13:10 crc kubenswrapper[4956]: I1211 22:13:10.375408 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jpvlt" podStartSLOduration=2.693196822 podStartE2EDuration="5.375391382s" podCreationTimestamp="2025-12-11 22:13:05 +0000 UTC" firstStartedPulling="2025-12-11 22:13:07.290300266 +0000 UTC m=+1479.734678456" lastFinishedPulling="2025-12-11 22:13:09.972494866 +0000 UTC m=+1482.416873016" 
observedRunningTime="2025-12-11 22:13:10.371472377 +0000 UTC m=+1482.815850527" watchObservedRunningTime="2025-12-11 22:13:10.375391382 +0000 UTC m=+1482.819769532" Dec 11 22:13:16 crc kubenswrapper[4956]: I1211 22:13:16.274419 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jpvlt" Dec 11 22:13:16 crc kubenswrapper[4956]: I1211 22:13:16.275018 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jpvlt" Dec 11 22:13:16 crc kubenswrapper[4956]: I1211 22:13:16.329826 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jpvlt" Dec 11 22:13:16 crc kubenswrapper[4956]: I1211 22:13:16.427576 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jpvlt" Dec 11 22:13:16 crc kubenswrapper[4956]: I1211 22:13:16.562188 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jpvlt"] Dec 11 22:13:16 crc kubenswrapper[4956]: I1211 22:13:16.888575 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 22:13:16 crc kubenswrapper[4956]: I1211 22:13:16.888994 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 22:13:16 crc kubenswrapper[4956]: I1211 22:13:16.889067 4956 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" Dec 11 22:13:16 crc kubenswrapper[4956]: I1211 22:13:16.890131 4956 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9fbcd49be89040ede2d7bbee715126cd5d47cc31e9a909c82551139fd0849303"} pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 22:13:16 crc kubenswrapper[4956]: I1211 22:13:16.890371 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" containerID="cri-o://9fbcd49be89040ede2d7bbee715126cd5d47cc31e9a909c82551139fd0849303" gracePeriod=600 Dec 11 22:13:17 crc kubenswrapper[4956]: I1211 22:13:17.399740 4956 generic.go:334] "Generic (PLEG): container finished" podID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerID="9fbcd49be89040ede2d7bbee715126cd5d47cc31e9a909c82551139fd0849303" exitCode=0 Dec 11 22:13:17 crc kubenswrapper[4956]: I1211 22:13:17.399887 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" event={"ID":"cf61c63b-b06c-4f51-add2-aefe57de751a","Type":"ContainerDied","Data":"9fbcd49be89040ede2d7bbee715126cd5d47cc31e9a909c82551139fd0849303"} Dec 11 22:13:17 crc kubenswrapper[4956]: I1211 22:13:17.400608 4956 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" event={"ID":"cf61c63b-b06c-4f51-add2-aefe57de751a","Type":"ContainerStarted","Data":"55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7"} Dec 11 22:13:17 crc kubenswrapper[4956]: I1211 22:13:17.400630 4956 scope.go:117] "RemoveContainer" containerID="c6c779569135f522c95dbbde5c790dc03324f364d3461dab8d0acf4bad2ce223" Dec 11 22:13:18 crc kubenswrapper[4956]: I1211 22:13:18.424488 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jpvlt" podUID="ed8914e2-78fe-4d03-a80c-721857c451b5" containerName="registry-server" containerID="cri-o://86cef842bf8a250e6f187eb194a2c6dcec4be761ff2bb7b7ae1d98f2f4bd6375" gracePeriod=2 Dec 11 22:13:19 crc kubenswrapper[4956]: I1211 22:13:19.449404 4956 generic.go:334] "Generic (PLEG): container finished" podID="ed8914e2-78fe-4d03-a80c-721857c451b5" containerID="86cef842bf8a250e6f187eb194a2c6dcec4be761ff2bb7b7ae1d98f2f4bd6375" exitCode=0 Dec 11 22:13:19 crc kubenswrapper[4956]: I1211 22:13:19.449627 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jpvlt" event={"ID":"ed8914e2-78fe-4d03-a80c-721857c451b5","Type":"ContainerDied","Data":"86cef842bf8a250e6f187eb194a2c6dcec4be761ff2bb7b7ae1d98f2f4bd6375"} Dec 11 22:13:19 crc kubenswrapper[4956]: I1211 22:13:19.580672 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jpvlt" Dec 11 22:13:19 crc kubenswrapper[4956]: I1211 22:13:19.754328 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2kmg4\" (UniqueName: \"kubernetes.io/projected/ed8914e2-78fe-4d03-a80c-721857c451b5-kube-api-access-2kmg4\") pod \"ed8914e2-78fe-4d03-a80c-721857c451b5\" (UID: \"ed8914e2-78fe-4d03-a80c-721857c451b5\") " Dec 11 22:13:19 crc kubenswrapper[4956]: I1211 22:13:19.754447 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed8914e2-78fe-4d03-a80c-721857c451b5-utilities\") pod \"ed8914e2-78fe-4d03-a80c-721857c451b5\" (UID: \"ed8914e2-78fe-4d03-a80c-721857c451b5\") " Dec 11 22:13:19 crc kubenswrapper[4956]: I1211 22:13:19.754591 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed8914e2-78fe-4d03-a80c-721857c451b5-catalog-content\") pod \"ed8914e2-78fe-4d03-a80c-721857c451b5\" (UID: \"ed8914e2-78fe-4d03-a80c-721857c451b5\") " Dec 11 22:13:19 crc kubenswrapper[4956]: I1211 22:13:19.755438 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed8914e2-78fe-4d03-a80c-721857c451b5-utilities" (OuterVolumeSpecName: "utilities") pod "ed8914e2-78fe-4d03-a80c-721857c451b5" (UID: "ed8914e2-78fe-4d03-a80c-721857c451b5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:13:19 crc kubenswrapper[4956]: I1211 22:13:19.760141 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed8914e2-78fe-4d03-a80c-721857c451b5-kube-api-access-2kmg4" (OuterVolumeSpecName: "kube-api-access-2kmg4") pod "ed8914e2-78fe-4d03-a80c-721857c451b5" (UID: "ed8914e2-78fe-4d03-a80c-721857c451b5"). InnerVolumeSpecName "kube-api-access-2kmg4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:13:19 crc kubenswrapper[4956]: I1211 22:13:19.831265 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed8914e2-78fe-4d03-a80c-721857c451b5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ed8914e2-78fe-4d03-a80c-721857c451b5" (UID: "ed8914e2-78fe-4d03-a80c-721857c451b5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:13:19 crc kubenswrapper[4956]: I1211 22:13:19.857811 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2kmg4\" (UniqueName: \"kubernetes.io/projected/ed8914e2-78fe-4d03-a80c-721857c451b5-kube-api-access-2kmg4\") on node \"crc\" DevicePath \"\"" Dec 11 22:13:19 crc kubenswrapper[4956]: I1211 22:13:19.857859 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed8914e2-78fe-4d03-a80c-721857c451b5-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 22:13:19 crc kubenswrapper[4956]: I1211 22:13:19.857868 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed8914e2-78fe-4d03-a80c-721857c451b5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 22:13:20 crc kubenswrapper[4956]: E1211 22:13:20.173367 4956 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poded8914e2_78fe_4d03_a80c_721857c451b5.slice\": RecentStats: unable to find data in memory cache]" Dec 11 22:13:20 crc kubenswrapper[4956]: I1211 22:13:20.460725 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jpvlt" event={"ID":"ed8914e2-78fe-4d03-a80c-721857c451b5","Type":"ContainerDied","Data":"c5c17714f0184213a00cc6a89754a5dd795f7bc47f163eeb5d623efbe0a3c7a1"} Dec 11 22:13:20 crc kubenswrapper[4956]: I1211 22:13:20.461094 4956 scope.go:117] "RemoveContainer" containerID="86cef842bf8a250e6f187eb194a2c6dcec4be761ff2bb7b7ae1d98f2f4bd6375" Dec 11 22:13:20 crc kubenswrapper[4956]: I1211 22:13:20.460855 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jpvlt" Dec 11 22:13:20 crc kubenswrapper[4956]: I1211 22:13:20.487786 4956 scope.go:117] "RemoveContainer" containerID="007c9e44731a104cb9c2abbf574e211757f561b63aa558c11f3c5d8151a846de" Dec 11 22:13:20 crc kubenswrapper[4956]: I1211 22:13:20.488644 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jpvlt"] Dec 11 22:13:20 crc kubenswrapper[4956]: I1211 22:13:20.494425 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jpvlt"] Dec 11 22:13:20 crc kubenswrapper[4956]: I1211 22:13:20.507336 4956 scope.go:117] "RemoveContainer" containerID="0f311447a4ca70a0393a8ee96a103f5f48d9d76155f590270d1dc5cafac34a56" Dec 11 22:13:22 crc kubenswrapper[4956]: I1211 22:13:22.035534 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed8914e2-78fe-4d03-a80c-721857c451b5" path="/var/lib/kubelet/pods/ed8914e2-78fe-4d03-a80c-721857c451b5/volumes" Dec 11 22:14:02 crc kubenswrapper[4956]: I1211 22:14:02.074406 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/root-account-create-update-c4fh7"] Dec 11 22:14:02 crc kubenswrapper[4956]: I1211 22:14:02.084204 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/root-account-create-update-c4fh7"] Dec 11 22:14:04 crc kubenswrapper[4956]: I1211 22:14:04.032643 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5aee315b-55ac-4721-8ad1-ff6128dc0b15" path="/var/lib/kubelet/pods/5aee315b-55ac-4721-8ad1-ff6128dc0b15/volumes" Dec 11 22:14:34 crc kubenswrapper[4956]: I1211 22:14:34.053821 4956 scope.go:117] "RemoveContainer" containerID="f63154460a3110e5255b3ef2fe24ccdd055d84f3bad6882295d3d709f130f286" Dec 11 22:14:34 crc kubenswrapper[4956]: I1211 22:14:34.083085 4956 scope.go:117] "RemoveContainer" containerID="abc834d6a7065ba9aba75a616670e471fcad6c34902e868c09cd27a3200f0f50" Dec 11 22:14:34 crc kubenswrapper[4956]: I1211 22:14:34.102012 4956 scope.go:117] "RemoveContainer" containerID="f55235b10356a944991fa8ae283b28fde5f8ad0369f476676bb3ffa23c942fe1" Dec 11 22:14:34 crc kubenswrapper[4956]: I1211 22:14:34.118680 4956 scope.go:117] "RemoveContainer" containerID="f9670200134c1286eaf9310939478d01fc8211a2de256c795f9999d5b3847c1b" Dec 11 22:14:34 crc kubenswrapper[4956]: I1211 22:14:34.135123 4956 scope.go:117] "RemoveContainer" containerID="26c1c73dcf0c5b39c4650886e10e51e3546fdbba8f61bbc344044f4d8589fa89" Dec 11 22:14:34 crc kubenswrapper[4956]: E1211 22:14:34.137346 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9670200134c1286eaf9310939478d01fc8211a2de256c795f9999d5b3847c1b\": container with ID starting with f9670200134c1286eaf9310939478d01fc8211a2de256c795f9999d5b3847c1b not found: ID does not exist" containerID="f9670200134c1286eaf9310939478d01fc8211a2de256c795f9999d5b3847c1b" Dec 11 22:14:34 crc kubenswrapper[4956]: I1211 22:14:34.150110 4956 scope.go:117] "RemoveContainer" containerID="2435489d05e6fe368d33194469d365927b43d7cf85f6a3d60022925f9a4f27bb" Dec 11 22:14:34 crc kubenswrapper[4956]: I1211 22:14:34.167405 4956 scope.go:117] "RemoveContainer" containerID="f3c99d9956bfc929e57948603b9d53a18d43469dfe98a68080ac495d23746def" Dec 11 22:14:34 crc kubenswrapper[4956]: I1211 22:14:34.186588 4956 scope.go:117] "RemoveContainer" containerID="a561660c7cf172a9504610fea52d23fc6ba2c25dd99b1e8e60024938473e01d2" Dec 11 22:14:34 crc 
kubenswrapper[4956]: I1211 22:14:34.207869 4956 scope.go:117] "RemoveContainer" containerID="dffb601064de95f9027bf6a1fe306997f65ac96acff81e28575f1d442ba6fa03" Dec 11 22:14:34 crc kubenswrapper[4956]: I1211 22:14:34.261546 4956 scope.go:117] "RemoveContainer" containerID="9d4aed0b7b74b22c6bddf686ce2036cb23fae0e337eb638f319bab7788fae09f" Dec 11 22:14:34 crc kubenswrapper[4956]: I1211 22:14:34.285857 4956 scope.go:117] "RemoveContainer" containerID="d1b3e1eae3a6852d757d436c0870da719cfaa0fe8df3d251bf5d0595bd07f739" Dec 11 22:14:34 crc kubenswrapper[4956]: I1211 22:14:34.308023 4956 scope.go:117] "RemoveContainer" containerID="5585fc1523add48f6aaa6482f0fd078a810c938185772132e926ad6267cd67b5" Dec 11 22:14:34 crc kubenswrapper[4956]: I1211 22:14:34.951025 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-724l4"] Dec 11 22:14:34 crc kubenswrapper[4956]: E1211 22:14:34.951375 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed8914e2-78fe-4d03-a80c-721857c451b5" containerName="extract-content" Dec 11 22:14:34 crc kubenswrapper[4956]: I1211 22:14:34.951391 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed8914e2-78fe-4d03-a80c-721857c451b5" containerName="extract-content" Dec 11 22:14:34 crc kubenswrapper[4956]: E1211 22:14:34.951421 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed8914e2-78fe-4d03-a80c-721857c451b5" containerName="extract-utilities" Dec 11 22:14:34 crc kubenswrapper[4956]: I1211 22:14:34.951429 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed8914e2-78fe-4d03-a80c-721857c451b5" containerName="extract-utilities" Dec 11 22:14:34 crc kubenswrapper[4956]: E1211 22:14:34.951446 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed8914e2-78fe-4d03-a80c-721857c451b5" containerName="registry-server" Dec 11 22:14:34 crc kubenswrapper[4956]: I1211 22:14:34.951454 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed8914e2-78fe-4d03-a80c-721857c451b5" containerName="registry-server" Dec 11 22:14:34 crc kubenswrapper[4956]: I1211 22:14:34.951640 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed8914e2-78fe-4d03-a80c-721857c451b5" containerName="registry-server" Dec 11 22:14:34 crc kubenswrapper[4956]: I1211 22:14:34.952871 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-724l4" Dec 11 22:14:34 crc kubenswrapper[4956]: I1211 22:14:34.959709 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-724l4"] Dec 11 22:14:34 crc kubenswrapper[4956]: I1211 22:14:34.992782 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53-utilities\") pod \"certified-operators-724l4\" (UID: \"dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53\") " pod="openshift-marketplace/certified-operators-724l4" Dec 11 22:14:34 crc kubenswrapper[4956]: I1211 22:14:34.992824 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53-catalog-content\") pod \"certified-operators-724l4\" (UID: \"dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53\") " pod="openshift-marketplace/certified-operators-724l4" Dec 11 22:14:34 crc kubenswrapper[4956]: I1211 22:14:34.992900 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmbgf\" (UniqueName: \"kubernetes.io/projected/dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53-kube-api-access-dmbgf\") pod \"certified-operators-724l4\" (UID: \"dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53\") " pod="openshift-marketplace/certified-operators-724l4" Dec 11 22:14:35 crc kubenswrapper[4956]: I1211 22:14:35.094238 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmbgf\" (UniqueName: \"kubernetes.io/projected/dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53-kube-api-access-dmbgf\") pod \"certified-operators-724l4\" (UID: \"dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53\") " pod="openshift-marketplace/certified-operators-724l4" Dec 11 22:14:35 crc kubenswrapper[4956]: I1211 22:14:35.094331 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53-utilities\") pod \"certified-operators-724l4\" (UID: \"dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53\") " pod="openshift-marketplace/certified-operators-724l4" Dec 11 22:14:35 crc kubenswrapper[4956]: I1211 22:14:35.094377 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53-catalog-content\") pod \"certified-operators-724l4\" (UID: \"dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53\") " pod="openshift-marketplace/certified-operators-724l4" Dec 11 22:14:35 crc kubenswrapper[4956]: I1211 22:14:35.095237 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53-utilities\") pod \"certified-operators-724l4\" (UID: \"dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53\") " pod="openshift-marketplace/certified-operators-724l4" Dec 11 22:14:35 crc kubenswrapper[4956]: I1211 22:14:35.095302 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53-catalog-content\") pod \"certified-operators-724l4\" (UID: \"dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53\") " pod="openshift-marketplace/certified-operators-724l4" Dec 11 22:14:35 crc kubenswrapper[4956]: I1211 22:14:35.115655 4956 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-dmbgf\" (UniqueName: \"kubernetes.io/projected/dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53-kube-api-access-dmbgf\") pod \"certified-operators-724l4\" (UID: \"dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53\") " pod="openshift-marketplace/certified-operators-724l4" Dec 11 22:14:35 crc kubenswrapper[4956]: I1211 22:14:35.318338 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-724l4" Dec 11 22:14:35 crc kubenswrapper[4956]: I1211 22:14:35.798563 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-724l4"] Dec 11 22:14:36 crc kubenswrapper[4956]: I1211 22:14:36.154317 4956 generic.go:334] "Generic (PLEG): container finished" podID="dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53" containerID="6a8978e19ae00a4857e9da2c1d3aa6fd559e830e23d7778ca8b057952b0628a3" exitCode=0 Dec 11 22:14:36 crc kubenswrapper[4956]: I1211 22:14:36.154401 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-724l4" event={"ID":"dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53","Type":"ContainerDied","Data":"6a8978e19ae00a4857e9da2c1d3aa6fd559e830e23d7778ca8b057952b0628a3"} Dec 11 22:14:36 crc kubenswrapper[4956]: I1211 22:14:36.154649 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-724l4" event={"ID":"dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53","Type":"ContainerStarted","Data":"6beee134775ab0c7c2eeae829ee3ef299c1f58136d19f885b52a93cca27eadb1"} Dec 11 22:14:37 crc kubenswrapper[4956]: I1211 22:14:37.164388 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-724l4" event={"ID":"dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53","Type":"ContainerStarted","Data":"970055246ce02c05d85fcb9c4a32332d16ac7d52b473641a57402adbc2073226"} Dec 11 22:14:38 crc kubenswrapper[4956]: I1211 22:14:38.173455 4956 generic.go:334] "Generic (PLEG): container finished" podID="dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53" containerID="970055246ce02c05d85fcb9c4a32332d16ac7d52b473641a57402adbc2073226" exitCode=0 Dec 11 22:14:38 crc kubenswrapper[4956]: I1211 22:14:38.173506 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-724l4" event={"ID":"dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53","Type":"ContainerDied","Data":"970055246ce02c05d85fcb9c4a32332d16ac7d52b473641a57402adbc2073226"} Dec 11 22:14:39 crc kubenswrapper[4956]: I1211 22:14:39.187737 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-724l4" event={"ID":"dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53","Type":"ContainerStarted","Data":"7e3622f21bb2eaedd09d07e36f5ac0f6032507c0a8b8ce8fd0e96aed36c45ad1"} Dec 11 22:14:39 crc kubenswrapper[4956]: I1211 22:14:39.216288 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-724l4" podStartSLOduration=2.679001601 podStartE2EDuration="5.216266635s" podCreationTimestamp="2025-12-11 22:14:34 +0000 UTC" firstStartedPulling="2025-12-11 22:14:36.156023868 +0000 UTC m=+1568.600402018" lastFinishedPulling="2025-12-11 22:14:38.693288882 +0000 UTC m=+1571.137667052" observedRunningTime="2025-12-11 22:14:39.20938367 +0000 UTC m=+1571.653761840" watchObservedRunningTime="2025-12-11 22:14:39.216266635 +0000 UTC m=+1571.660644785" Dec 11 22:14:45 crc kubenswrapper[4956]: I1211 22:14:45.318493 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/certified-operators-724l4" Dec 11 22:14:45 crc kubenswrapper[4956]: I1211 22:14:45.319014 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-724l4" Dec 11 22:14:45 crc kubenswrapper[4956]: I1211 22:14:45.358901 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-724l4" Dec 11 22:14:46 crc kubenswrapper[4956]: I1211 22:14:46.318706 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-724l4" Dec 11 22:14:46 crc kubenswrapper[4956]: I1211 22:14:46.376746 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-724l4"] Dec 11 22:14:48 crc kubenswrapper[4956]: I1211 22:14:48.263639 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-724l4" podUID="dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53" containerName="registry-server" containerID="cri-o://7e3622f21bb2eaedd09d07e36f5ac0f6032507c0a8b8ce8fd0e96aed36c45ad1" gracePeriod=2 Dec 11 22:14:49 crc kubenswrapper[4956]: I1211 22:14:49.271360 4956 generic.go:334] "Generic (PLEG): container finished" podID="dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53" containerID="7e3622f21bb2eaedd09d07e36f5ac0f6032507c0a8b8ce8fd0e96aed36c45ad1" exitCode=0 Dec 11 22:14:49 crc kubenswrapper[4956]: I1211 22:14:49.271432 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-724l4" event={"ID":"dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53","Type":"ContainerDied","Data":"7e3622f21bb2eaedd09d07e36f5ac0f6032507c0a8b8ce8fd0e96aed36c45ad1"} Dec 11 22:14:49 crc kubenswrapper[4956]: I1211 22:14:49.787296 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-724l4" Dec 11 22:14:49 crc kubenswrapper[4956]: I1211 22:14:49.914677 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmbgf\" (UniqueName: \"kubernetes.io/projected/dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53-kube-api-access-dmbgf\") pod \"dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53\" (UID: \"dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53\") " Dec 11 22:14:49 crc kubenswrapper[4956]: I1211 22:14:49.914765 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53-catalog-content\") pod \"dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53\" (UID: \"dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53\") " Dec 11 22:14:49 crc kubenswrapper[4956]: I1211 22:14:49.914934 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53-utilities\") pod \"dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53\" (UID: \"dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53\") " Dec 11 22:14:49 crc kubenswrapper[4956]: I1211 22:14:49.916195 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53-utilities" (OuterVolumeSpecName: "utilities") pod "dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53" (UID: "dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:14:49 crc kubenswrapper[4956]: I1211 22:14:49.919996 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53-kube-api-access-dmbgf" (OuterVolumeSpecName: "kube-api-access-dmbgf") pod "dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53" (UID: "dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53"). InnerVolumeSpecName "kube-api-access-dmbgf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:14:49 crc kubenswrapper[4956]: I1211 22:14:49.988660 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53" (UID: "dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:14:50 crc kubenswrapper[4956]: I1211 22:14:50.017143 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 22:14:50 crc kubenswrapper[4956]: I1211 22:14:50.017187 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 22:14:50 crc kubenswrapper[4956]: I1211 22:14:50.017204 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmbgf\" (UniqueName: \"kubernetes.io/projected/dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53-kube-api-access-dmbgf\") on node \"crc\" DevicePath \"\"" Dec 11 22:14:50 crc kubenswrapper[4956]: I1211 22:14:50.286308 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-724l4" event={"ID":"dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53","Type":"ContainerDied","Data":"6beee134775ab0c7c2eeae829ee3ef299c1f58136d19f885b52a93cca27eadb1"} Dec 11 22:14:50 crc kubenswrapper[4956]: I1211 22:14:50.286429 4956 scope.go:117] "RemoveContainer" containerID="7e3622f21bb2eaedd09d07e36f5ac0f6032507c0a8b8ce8fd0e96aed36c45ad1" Dec 11 22:14:50 crc kubenswrapper[4956]: I1211 22:14:50.286526 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-724l4" Dec 11 22:14:50 crc kubenswrapper[4956]: I1211 22:14:50.322007 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-724l4"] Dec 11 22:14:50 crc kubenswrapper[4956]: I1211 22:14:50.324832 4956 scope.go:117] "RemoveContainer" containerID="970055246ce02c05d85fcb9c4a32332d16ac7d52b473641a57402adbc2073226" Dec 11 22:14:50 crc kubenswrapper[4956]: I1211 22:14:50.330467 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-724l4"] Dec 11 22:14:51 crc kubenswrapper[4956]: I1211 22:14:50.352033 4956 scope.go:117] "RemoveContainer" containerID="6a8978e19ae00a4857e9da2c1d3aa6fd559e830e23d7778ca8b057952b0628a3" Dec 11 22:14:52 crc kubenswrapper[4956]: I1211 22:14:52.030829 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53" path="/var/lib/kubelet/pods/dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53/volumes" Dec 11 22:15:00 crc kubenswrapper[4956]: I1211 22:15:00.134991 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424855-nhzjb"] Dec 11 22:15:00 crc kubenswrapper[4956]: E1211 22:15:00.135926 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53" containerName="registry-server" Dec 11 22:15:00 crc kubenswrapper[4956]: I1211 22:15:00.135941 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53" containerName="registry-server" Dec 11 22:15:00 crc kubenswrapper[4956]: E1211 22:15:00.135964 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53" containerName="extract-utilities" Dec 11 22:15:00 crc kubenswrapper[4956]: I1211 22:15:00.135970 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53" containerName="extract-utilities" Dec 11 22:15:00 crc kubenswrapper[4956]: E1211 22:15:00.135993 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53" containerName="extract-content" Dec 11 22:15:00 crc kubenswrapper[4956]: I1211 22:15:00.135999 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53" containerName="extract-content" Dec 11 22:15:00 crc kubenswrapper[4956]: I1211 22:15:00.136122 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd7aac8b-4a89-4b5b-85f5-c34ff24a9a53" containerName="registry-server" Dec 11 22:15:00 crc kubenswrapper[4956]: I1211 22:15:00.136547 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424855-nhzjb" Dec 11 22:15:00 crc kubenswrapper[4956]: I1211 22:15:00.138421 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 11 22:15:00 crc kubenswrapper[4956]: I1211 22:15:00.146187 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424855-nhzjb"] Dec 11 22:15:00 crc kubenswrapper[4956]: I1211 22:15:00.147675 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 11 22:15:00 crc kubenswrapper[4956]: I1211 22:15:00.305588 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e7af140b-e088-4525-87e2-18a15331a849-secret-volume\") pod \"collect-profiles-29424855-nhzjb\" (UID: \"e7af140b-e088-4525-87e2-18a15331a849\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424855-nhzjb" Dec 11 22:15:00 crc kubenswrapper[4956]: I1211 22:15:00.305669 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e7af140b-e088-4525-87e2-18a15331a849-config-volume\") pod \"collect-profiles-29424855-nhzjb\" (UID: \"e7af140b-e088-4525-87e2-18a15331a849\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424855-nhzjb" Dec 11 22:15:00 crc kubenswrapper[4956]: I1211 22:15:00.305708 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hf7hg\" (UniqueName: \"kubernetes.io/projected/e7af140b-e088-4525-87e2-18a15331a849-kube-api-access-hf7hg\") pod \"collect-profiles-29424855-nhzjb\" (UID: \"e7af140b-e088-4525-87e2-18a15331a849\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424855-nhzjb" Dec 11 22:15:00 crc kubenswrapper[4956]: I1211 22:15:00.407309 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e7af140b-e088-4525-87e2-18a15331a849-secret-volume\") pod \"collect-profiles-29424855-nhzjb\" (UID: \"e7af140b-e088-4525-87e2-18a15331a849\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424855-nhzjb" Dec 11 22:15:00 crc kubenswrapper[4956]: I1211 22:15:00.407678 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e7af140b-e088-4525-87e2-18a15331a849-config-volume\") pod \"collect-profiles-29424855-nhzjb\" (UID: \"e7af140b-e088-4525-87e2-18a15331a849\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424855-nhzjb" Dec 11 22:15:00 crc kubenswrapper[4956]: I1211 22:15:00.407879 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hf7hg\" (UniqueName: \"kubernetes.io/projected/e7af140b-e088-4525-87e2-18a15331a849-kube-api-access-hf7hg\") pod \"collect-profiles-29424855-nhzjb\" (UID: \"e7af140b-e088-4525-87e2-18a15331a849\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424855-nhzjb" Dec 11 22:15:00 crc kubenswrapper[4956]: I1211 22:15:00.409106 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e7af140b-e088-4525-87e2-18a15331a849-config-volume\") pod 
\"collect-profiles-29424855-nhzjb\" (UID: \"e7af140b-e088-4525-87e2-18a15331a849\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424855-nhzjb" Dec 11 22:15:00 crc kubenswrapper[4956]: I1211 22:15:00.417140 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e7af140b-e088-4525-87e2-18a15331a849-secret-volume\") pod \"collect-profiles-29424855-nhzjb\" (UID: \"e7af140b-e088-4525-87e2-18a15331a849\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424855-nhzjb" Dec 11 22:15:00 crc kubenswrapper[4956]: I1211 22:15:00.428214 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hf7hg\" (UniqueName: \"kubernetes.io/projected/e7af140b-e088-4525-87e2-18a15331a849-kube-api-access-hf7hg\") pod \"collect-profiles-29424855-nhzjb\" (UID: \"e7af140b-e088-4525-87e2-18a15331a849\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424855-nhzjb" Dec 11 22:15:00 crc kubenswrapper[4956]: I1211 22:15:00.464019 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424855-nhzjb" Dec 11 22:15:00 crc kubenswrapper[4956]: I1211 22:15:00.871394 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424855-nhzjb"] Dec 11 22:15:01 crc kubenswrapper[4956]: I1211 22:15:01.384637 4956 generic.go:334] "Generic (PLEG): container finished" podID="e7af140b-e088-4525-87e2-18a15331a849" containerID="e48240c65d577839e75e743530b2ae34951545115d0c3a2e0d340b6ba0ba7a38" exitCode=0 Dec 11 22:15:01 crc kubenswrapper[4956]: I1211 22:15:01.384728 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424855-nhzjb" event={"ID":"e7af140b-e088-4525-87e2-18a15331a849","Type":"ContainerDied","Data":"e48240c65d577839e75e743530b2ae34951545115d0c3a2e0d340b6ba0ba7a38"} Dec 11 22:15:01 crc kubenswrapper[4956]: I1211 22:15:01.385046 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424855-nhzjb" event={"ID":"e7af140b-e088-4525-87e2-18a15331a849","Type":"ContainerStarted","Data":"1df71bb970fadda5f4fd953d76984f2311097d2ea592ad7f3b244af693ed4b9b"} Dec 11 22:15:02 crc kubenswrapper[4956]: I1211 22:15:02.692059 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424855-nhzjb" Dec 11 22:15:02 crc kubenswrapper[4956]: I1211 22:15:02.839854 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e7af140b-e088-4525-87e2-18a15331a849-secret-volume\") pod \"e7af140b-e088-4525-87e2-18a15331a849\" (UID: \"e7af140b-e088-4525-87e2-18a15331a849\") " Dec 11 22:15:02 crc kubenswrapper[4956]: I1211 22:15:02.839993 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e7af140b-e088-4525-87e2-18a15331a849-config-volume\") pod \"e7af140b-e088-4525-87e2-18a15331a849\" (UID: \"e7af140b-e088-4525-87e2-18a15331a849\") " Dec 11 22:15:02 crc kubenswrapper[4956]: I1211 22:15:02.840038 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hf7hg\" (UniqueName: \"kubernetes.io/projected/e7af140b-e088-4525-87e2-18a15331a849-kube-api-access-hf7hg\") pod \"e7af140b-e088-4525-87e2-18a15331a849\" (UID: \"e7af140b-e088-4525-87e2-18a15331a849\") " Dec 11 22:15:02 crc kubenswrapper[4956]: I1211 22:15:02.840652 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7af140b-e088-4525-87e2-18a15331a849-config-volume" (OuterVolumeSpecName: "config-volume") pod "e7af140b-e088-4525-87e2-18a15331a849" (UID: "e7af140b-e088-4525-87e2-18a15331a849"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 22:15:02 crc kubenswrapper[4956]: I1211 22:15:02.847027 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7af140b-e088-4525-87e2-18a15331a849-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e7af140b-e088-4525-87e2-18a15331a849" (UID: "e7af140b-e088-4525-87e2-18a15331a849"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 22:15:02 crc kubenswrapper[4956]: I1211 22:15:02.847162 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7af140b-e088-4525-87e2-18a15331a849-kube-api-access-hf7hg" (OuterVolumeSpecName: "kube-api-access-hf7hg") pod "e7af140b-e088-4525-87e2-18a15331a849" (UID: "e7af140b-e088-4525-87e2-18a15331a849"). InnerVolumeSpecName "kube-api-access-hf7hg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:15:02 crc kubenswrapper[4956]: I1211 22:15:02.941893 4956 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e7af140b-e088-4525-87e2-18a15331a849-config-volume\") on node \"crc\" DevicePath \"\"" Dec 11 22:15:02 crc kubenswrapper[4956]: I1211 22:15:02.941926 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hf7hg\" (UniqueName: \"kubernetes.io/projected/e7af140b-e088-4525-87e2-18a15331a849-kube-api-access-hf7hg\") on node \"crc\" DevicePath \"\"" Dec 11 22:15:02 crc kubenswrapper[4956]: I1211 22:15:02.941940 4956 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e7af140b-e088-4525-87e2-18a15331a849-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 11 22:15:03 crc kubenswrapper[4956]: I1211 22:15:03.404331 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424855-nhzjb" event={"ID":"e7af140b-e088-4525-87e2-18a15331a849","Type":"ContainerDied","Data":"1df71bb970fadda5f4fd953d76984f2311097d2ea592ad7f3b244af693ed4b9b"} Dec 11 22:15:03 crc kubenswrapper[4956]: I1211 22:15:03.404384 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1df71bb970fadda5f4fd953d76984f2311097d2ea592ad7f3b244af693ed4b9b" Dec 11 22:15:03 crc kubenswrapper[4956]: I1211 22:15:03.404477 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424855-nhzjb" Dec 11 22:15:45 crc kubenswrapper[4956]: I1211 22:15:45.054025 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/keystone-6e56-account-create-update-7csrp"] Dec 11 22:15:45 crc kubenswrapper[4956]: I1211 22:15:45.063398 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/keystone-6e56-account-create-update-7csrp"] Dec 11 22:15:46 crc kubenswrapper[4956]: I1211 22:15:46.039645 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3573b9e-0775-4bee-bb5f-df5a7c4f62fe" path="/var/lib/kubelet/pods/b3573b9e-0775-4bee-bb5f-df5a7c4f62fe/volumes" Dec 11 22:15:46 crc kubenswrapper[4956]: I1211 22:15:46.041258 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/keystone-db-create-dz86p"] Dec 11 22:15:46 crc kubenswrapper[4956]: I1211 22:15:46.041310 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/keystone-db-create-dz86p"] Dec 11 22:15:46 crc kubenswrapper[4956]: I1211 22:15:46.888030 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 22:15:46 crc kubenswrapper[4956]: I1211 22:15:46.888103 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 22:15:48 crc kubenswrapper[4956]: I1211 22:15:48.030523 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fbb0d97-cbdf-4c82-bf2e-69f422c76813" 
path="/var/lib/kubelet/pods/2fbb0d97-cbdf-4c82-bf2e-69f422c76813/volumes" Dec 11 22:16:16 crc kubenswrapper[4956]: I1211 22:16:16.888096 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 22:16:16 crc kubenswrapper[4956]: I1211 22:16:16.888707 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 22:16:34 crc kubenswrapper[4956]: I1211 22:16:34.430408 4956 scope.go:117] "RemoveContainer" containerID="2454fcb60eec66f2260b873c63ff19c69059df1781620b1d8527f9119ed57b68" Dec 11 22:16:34 crc kubenswrapper[4956]: I1211 22:16:34.460936 4956 scope.go:117] "RemoveContainer" containerID="e836a5eb58fc684c1049070aab0a3ea3ecc48bef21df4157d396696ab7904452" Dec 11 22:16:35 crc kubenswrapper[4956]: I1211 22:16:35.042968 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/keystone-db-sync-kws2f"] Dec 11 22:16:35 crc kubenswrapper[4956]: I1211 22:16:35.052557 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/keystone-db-sync-kws2f"] Dec 11 22:16:36 crc kubenswrapper[4956]: I1211 22:16:36.031348 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fbd3dcf-e490-45e0-b94d-ab44f70e176e" path="/var/lib/kubelet/pods/2fbd3dcf-e490-45e0-b94d-ab44f70e176e/volumes" Dec 11 22:16:46 crc kubenswrapper[4956]: I1211 22:16:46.046323 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/barbican-5eb0-account-create-update-dv7rc"] Dec 11 22:16:46 crc kubenswrapper[4956]: I1211 22:16:46.052436 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/barbican-5eb0-account-create-update-dv7rc"] Dec 11 22:16:46 crc kubenswrapper[4956]: I1211 22:16:46.058720 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/keystone-bootstrap-p8nhm"] Dec 11 22:16:46 crc kubenswrapper[4956]: I1211 22:16:46.063914 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/barbican-db-create-lkhh8"] Dec 11 22:16:46 crc kubenswrapper[4956]: I1211 22:16:46.069443 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/keystone-bootstrap-p8nhm"] Dec 11 22:16:46 crc kubenswrapper[4956]: I1211 22:16:46.075176 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/barbican-db-create-lkhh8"] Dec 11 22:16:46 crc kubenswrapper[4956]: I1211 22:16:46.888325 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 22:16:46 crc kubenswrapper[4956]: I1211 22:16:46.888410 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 
11 22:16:46 crc kubenswrapper[4956]: I1211 22:16:46.888463 4956 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" Dec 11 22:16:46 crc kubenswrapper[4956]: I1211 22:16:46.889211 4956 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7"} pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 22:16:46 crc kubenswrapper[4956]: I1211 22:16:46.889628 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" containerID="cri-o://55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" gracePeriod=600 Dec 11 22:16:47 crc kubenswrapper[4956]: E1211 22:16:47.042763 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:16:47 crc kubenswrapper[4956]: I1211 22:16:47.440748 4956 generic.go:334] "Generic (PLEG): container finished" podID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" exitCode=0 Dec 11 22:16:47 crc kubenswrapper[4956]: I1211 22:16:47.440857 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" event={"ID":"cf61c63b-b06c-4f51-add2-aefe57de751a","Type":"ContainerDied","Data":"55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7"} Dec 11 22:16:47 crc kubenswrapper[4956]: I1211 22:16:47.440909 4956 scope.go:117] "RemoveContainer" containerID="9fbcd49be89040ede2d7bbee715126cd5d47cc31e9a909c82551139fd0849303" Dec 11 22:16:47 crc kubenswrapper[4956]: I1211 22:16:47.441789 4956 scope.go:117] "RemoveContainer" containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" Dec 11 22:16:47 crc kubenswrapper[4956]: E1211 22:16:47.442128 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:16:48 crc kubenswrapper[4956]: I1211 22:16:48.042898 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17780bbd-36df-40ee-a35f-61f848e57120" path="/var/lib/kubelet/pods/17780bbd-36df-40ee-a35f-61f848e57120/volumes" Dec 11 22:16:48 crc kubenswrapper[4956]: I1211 22:16:48.043995 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ffb504a-4c0b-483e-80b3-17c9ffd66385" path="/var/lib/kubelet/pods/5ffb504a-4c0b-483e-80b3-17c9ffd66385/volumes" Dec 11 22:16:48 crc kubenswrapper[4956]: I1211 22:16:48.045125 4956 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="990facdf-1f75-4afc-b70b-464384b2c021" path="/var/lib/kubelet/pods/990facdf-1f75-4afc-b70b-464384b2c021/volumes" Dec 11 22:17:03 crc kubenswrapper[4956]: I1211 22:17:03.022342 4956 scope.go:117] "RemoveContainer" containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" Dec 11 22:17:03 crc kubenswrapper[4956]: E1211 22:17:03.023376 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:17:17 crc kubenswrapper[4956]: I1211 22:17:17.022012 4956 scope.go:117] "RemoveContainer" containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" Dec 11 22:17:17 crc kubenswrapper[4956]: E1211 22:17:17.023127 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:17:30 crc kubenswrapper[4956]: I1211 22:17:30.021333 4956 scope.go:117] "RemoveContainer" containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" Dec 11 22:17:30 crc kubenswrapper[4956]: E1211 22:17:30.022601 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:17:34 crc kubenswrapper[4956]: I1211 22:17:34.543691 4956 scope.go:117] "RemoveContainer" containerID="6ccd9c935f4174be89722490acdab4a95c0b75b7d85f7d38925db439146a88d1" Dec 11 22:17:34 crc kubenswrapper[4956]: I1211 22:17:34.580278 4956 scope.go:117] "RemoveContainer" containerID="f4121f79478a96f02bbdb01651fb7765faa55944acaba4a9451411c1b547074c" Dec 11 22:17:34 crc kubenswrapper[4956]: I1211 22:17:34.623121 4956 scope.go:117] "RemoveContainer" containerID="f191445703ff927c9f6afe5eb9ae2407808a76ccbc7423240210401ff9163723" Dec 11 22:17:34 crc kubenswrapper[4956]: I1211 22:17:34.688093 4956 scope.go:117] "RemoveContainer" containerID="dc4c1d73124ddb0bd856448e218e18144f13e29b839e858d2ea3365931b39379" Dec 11 22:17:43 crc kubenswrapper[4956]: I1211 22:17:43.020987 4956 scope.go:117] "RemoveContainer" containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" Dec 11 22:17:43 crc kubenswrapper[4956]: E1211 22:17:43.021801 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" 
podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:17:58 crc kubenswrapper[4956]: I1211 22:17:58.031721 4956 scope.go:117] "RemoveContainer" containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" Dec 11 22:17:58 crc kubenswrapper[4956]: E1211 22:17:58.032929 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:18:13 crc kubenswrapper[4956]: I1211 22:18:13.021659 4956 scope.go:117] "RemoveContainer" containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" Dec 11 22:18:13 crc kubenswrapper[4956]: E1211 22:18:13.022568 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:18:24 crc kubenswrapper[4956]: I1211 22:18:24.021452 4956 scope.go:117] "RemoveContainer" containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" Dec 11 22:18:24 crc kubenswrapper[4956]: E1211 22:18:24.022532 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:18:36 crc kubenswrapper[4956]: I1211 22:18:36.021333 4956 scope.go:117] "RemoveContainer" containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" Dec 11 22:18:36 crc kubenswrapper[4956]: E1211 22:18:36.022289 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:18:51 crc kubenswrapper[4956]: I1211 22:18:51.021506 4956 scope.go:117] "RemoveContainer" containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" Dec 11 22:18:51 crc kubenswrapper[4956]: E1211 22:18:51.022671 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:19:02 crc kubenswrapper[4956]: I1211 22:19:02.021325 4956 scope.go:117] "RemoveContainer" 
containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" Dec 11 22:19:02 crc kubenswrapper[4956]: E1211 22:19:02.022433 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.021845 4956 scope.go:117] "RemoveContainer" containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" Dec 11 22:19:13 crc kubenswrapper[4956]: E1211 22:19:13.022398 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.031357 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-storage-2"] Dec 11 22:19:13 crc kubenswrapper[4956]: E1211 22:19:13.031852 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7af140b-e088-4525-87e2-18a15331a849" containerName="collect-profiles" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.031886 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7af140b-e088-4525-87e2-18a15331a849" containerName="collect-profiles" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.032129 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7af140b-e088-4525-87e2-18a15331a849" containerName="collect-profiles" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.039936 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.040647 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-storage-1"] Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.052915 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.074670 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-2"] Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.089582 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-1"] Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.211969 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smdx7\" (UniqueName: \"kubernetes.io/projected/6d2691c4-da4b-45cc-9fd4-13c002eb3dd2-kube-api-access-smdx7\") pod \"swift-storage-1\" (UID: \"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.212091 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/20a57394-ad58-443c-9c3b-4ad74b00cf66-cache\") pod \"swift-storage-2\" (UID: \"20a57394-ad58-443c-9c3b-4ad74b00cf66\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.212127 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6d2691c4-da4b-45cc-9fd4-13c002eb3dd2-lock\") pod \"swift-storage-1\" (UID: \"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.212160 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-2\" (UID: \"20a57394-ad58-443c-9c3b-4ad74b00cf66\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.212181 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/20a57394-ad58-443c-9c3b-4ad74b00cf66-etc-swift\") pod \"swift-storage-2\" (UID: \"20a57394-ad58-443c-9c3b-4ad74b00cf66\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.212235 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6d2691c4-da4b-45cc-9fd4-13c002eb3dd2-cache\") pod \"swift-storage-1\" (UID: \"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.212256 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-1\" (UID: \"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.212494 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6d2691c4-da4b-45cc-9fd4-13c002eb3dd2-etc-swift\") pod \"swift-storage-1\" (UID: \"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.212510 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/20a57394-ad58-443c-9c3b-4ad74b00cf66-lock\") pod \"swift-storage-2\" (UID: \"20a57394-ad58-443c-9c3b-4ad74b00cf66\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.212525 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lhf2\" (UniqueName: \"kubernetes.io/projected/20a57394-ad58-443c-9c3b-4ad74b00cf66-kube-api-access-9lhf2\") pod \"swift-storage-2\" (UID: \"20a57394-ad58-443c-9c3b-4ad74b00cf66\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.292723 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49"] Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.293949 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.296003 4956 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-proxy-config-data" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.310730 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49"] Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.314005 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6d2691c4-da4b-45cc-9fd4-13c002eb3dd2-etc-swift\") pod \"swift-storage-1\" (UID: \"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.314067 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/20a57394-ad58-443c-9c3b-4ad74b00cf66-lock\") pod \"swift-storage-2\" (UID: \"20a57394-ad58-443c-9c3b-4ad74b00cf66\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.314096 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lhf2\" (UniqueName: \"kubernetes.io/projected/20a57394-ad58-443c-9c3b-4ad74b00cf66-kube-api-access-9lhf2\") pod \"swift-storage-2\" (UID: \"20a57394-ad58-443c-9c3b-4ad74b00cf66\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.314128 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smdx7\" (UniqueName: \"kubernetes.io/projected/6d2691c4-da4b-45cc-9fd4-13c002eb3dd2-kube-api-access-smdx7\") pod \"swift-storage-1\" (UID: \"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.314164 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/20a57394-ad58-443c-9c3b-4ad74b00cf66-cache\") pod \"swift-storage-2\" (UID: \"20a57394-ad58-443c-9c3b-4ad74b00cf66\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.314200 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6d2691c4-da4b-45cc-9fd4-13c002eb3dd2-lock\") pod \"swift-storage-1\" (UID: \"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:19:13 crc kubenswrapper[4956]: 
I1211 22:19:13.314242 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-2\" (UID: \"20a57394-ad58-443c-9c3b-4ad74b00cf66\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.314266 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/20a57394-ad58-443c-9c3b-4ad74b00cf66-etc-swift\") pod \"swift-storage-2\" (UID: \"20a57394-ad58-443c-9c3b-4ad74b00cf66\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.314304 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6d2691c4-da4b-45cc-9fd4-13c002eb3dd2-cache\") pod \"swift-storage-1\" (UID: \"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.314328 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-1\" (UID: \"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.314697 4956 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-1\" (UID: \"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2\") device mount path \"/mnt/openstack/pv03\"" pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.314723 4956 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-2\" (UID: \"20a57394-ad58-443c-9c3b-4ad74b00cf66\") device mount path \"/mnt/openstack/pv09\"" pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.315105 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6d2691c4-da4b-45cc-9fd4-13c002eb3dd2-lock\") pod \"swift-storage-1\" (UID: \"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.315186 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6d2691c4-da4b-45cc-9fd4-13c002eb3dd2-cache\") pod \"swift-storage-1\" (UID: \"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.315211 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/20a57394-ad58-443c-9c3b-4ad74b00cf66-lock\") pod \"swift-storage-2\" (UID: \"20a57394-ad58-443c-9c3b-4ad74b00cf66\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.315362 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/20a57394-ad58-443c-9c3b-4ad74b00cf66-cache\") pod \"swift-storage-2\" (UID: \"20a57394-ad58-443c-9c3b-4ad74b00cf66\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:19:13 crc 
kubenswrapper[4956]: I1211 22:19:13.332349 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6d2691c4-da4b-45cc-9fd4-13c002eb3dd2-etc-swift\") pod \"swift-storage-1\" (UID: \"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.342484 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/20a57394-ad58-443c-9c3b-4ad74b00cf66-etc-swift\") pod \"swift-storage-2\" (UID: \"20a57394-ad58-443c-9c3b-4ad74b00cf66\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.345812 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smdx7\" (UniqueName: \"kubernetes.io/projected/6d2691c4-da4b-45cc-9fd4-13c002eb3dd2-kube-api-access-smdx7\") pod \"swift-storage-1\" (UID: \"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.346662 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lhf2\" (UniqueName: \"kubernetes.io/projected/20a57394-ad58-443c-9c3b-4ad74b00cf66-kube-api-access-9lhf2\") pod \"swift-storage-2\" (UID: \"20a57394-ad58-443c-9c3b-4ad74b00cf66\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.349578 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-1\" (UID: \"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2\") " pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.360097 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-2\" (UID: \"20a57394-ad58-443c-9c3b-4ad74b00cf66\") " pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.369696 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-2" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.395743 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-1" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.415157 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c88182b-3428-4ab0-8a8c-939487bcc292-log-httpd\") pod \"swift-proxy-7d4fb88647-dsz49\" (UID: \"5c88182b-3428-4ab0-8a8c-939487bcc292\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.415222 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wcj6\" (UniqueName: \"kubernetes.io/projected/5c88182b-3428-4ab0-8a8c-939487bcc292-kube-api-access-2wcj6\") pod \"swift-proxy-7d4fb88647-dsz49\" (UID: \"5c88182b-3428-4ab0-8a8c-939487bcc292\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.415310 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c88182b-3428-4ab0-8a8c-939487bcc292-run-httpd\") pod \"swift-proxy-7d4fb88647-dsz49\" (UID: \"5c88182b-3428-4ab0-8a8c-939487bcc292\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.415367 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5c88182b-3428-4ab0-8a8c-939487bcc292-etc-swift\") pod \"swift-proxy-7d4fb88647-dsz49\" (UID: \"5c88182b-3428-4ab0-8a8c-939487bcc292\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.415391 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c88182b-3428-4ab0-8a8c-939487bcc292-config-data\") pod \"swift-proxy-7d4fb88647-dsz49\" (UID: \"5c88182b-3428-4ab0-8a8c-939487bcc292\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.517744 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c88182b-3428-4ab0-8a8c-939487bcc292-run-httpd\") pod \"swift-proxy-7d4fb88647-dsz49\" (UID: \"5c88182b-3428-4ab0-8a8c-939487bcc292\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.518134 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5c88182b-3428-4ab0-8a8c-939487bcc292-etc-swift\") pod \"swift-proxy-7d4fb88647-dsz49\" (UID: \"5c88182b-3428-4ab0-8a8c-939487bcc292\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.518164 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c88182b-3428-4ab0-8a8c-939487bcc292-config-data\") pod \"swift-proxy-7d4fb88647-dsz49\" (UID: \"5c88182b-3428-4ab0-8a8c-939487bcc292\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.518188 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c88182b-3428-4ab0-8a8c-939487bcc292-log-httpd\") pod 
\"swift-proxy-7d4fb88647-dsz49\" (UID: \"5c88182b-3428-4ab0-8a8c-939487bcc292\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.518216 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wcj6\" (UniqueName: \"kubernetes.io/projected/5c88182b-3428-4ab0-8a8c-939487bcc292-kube-api-access-2wcj6\") pod \"swift-proxy-7d4fb88647-dsz49\" (UID: \"5c88182b-3428-4ab0-8a8c-939487bcc292\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.519059 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c88182b-3428-4ab0-8a8c-939487bcc292-run-httpd\") pod \"swift-proxy-7d4fb88647-dsz49\" (UID: \"5c88182b-3428-4ab0-8a8c-939487bcc292\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.519330 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c88182b-3428-4ab0-8a8c-939487bcc292-log-httpd\") pod \"swift-proxy-7d4fb88647-dsz49\" (UID: \"5c88182b-3428-4ab0-8a8c-939487bcc292\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.525940 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c88182b-3428-4ab0-8a8c-939487bcc292-config-data\") pod \"swift-proxy-7d4fb88647-dsz49\" (UID: \"5c88182b-3428-4ab0-8a8c-939487bcc292\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.526218 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5c88182b-3428-4ab0-8a8c-939487bcc292-etc-swift\") pod \"swift-proxy-7d4fb88647-dsz49\" (UID: \"5c88182b-3428-4ab0-8a8c-939487bcc292\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.543011 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wcj6\" (UniqueName: \"kubernetes.io/projected/5c88182b-3428-4ab0-8a8c-939487bcc292-kube-api-access-2wcj6\") pod \"swift-proxy-7d4fb88647-dsz49\" (UID: \"5c88182b-3428-4ab0-8a8c-939487bcc292\") " pod="swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49" Dec 11 22:19:13 crc kubenswrapper[4956]: I1211 22:19:13.607876 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49" Dec 11 22:19:16 crc kubenswrapper[4956]: I1211 22:19:16.917621 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-s98dt"] Dec 11 22:19:16 crc kubenswrapper[4956]: I1211 22:19:16.926406 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-s98dt"] Dec 11 22:19:16 crc kubenswrapper[4956]: I1211 22:19:16.932662 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-8flfm"] Dec 11 22:19:16 crc kubenswrapper[4956]: I1211 22:19:16.933691 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" Dec 11 22:19:16 crc kubenswrapper[4956]: I1211 22:19:16.935845 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data" Dec 11 22:19:16 crc kubenswrapper[4956]: I1211 22:19:16.936326 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts" Dec 11 22:19:16 crc kubenswrapper[4956]: I1211 22:19:16.946159 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-8flfm"] Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.114368 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d385dbd2-4908-4c42-b48f-20109e20e76f-swiftconf\") pod \"swift-ring-rebalance-8flfm\" (UID: \"d385dbd2-4908-4c42-b48f-20109e20e76f\") " pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.114412 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d385dbd2-4908-4c42-b48f-20109e20e76f-dispersionconf\") pod \"swift-ring-rebalance-8flfm\" (UID: \"d385dbd2-4908-4c42-b48f-20109e20e76f\") " pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.114438 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d385dbd2-4908-4c42-b48f-20109e20e76f-ring-data-devices\") pod \"swift-ring-rebalance-8flfm\" (UID: \"d385dbd2-4908-4c42-b48f-20109e20e76f\") " pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.114468 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d385dbd2-4908-4c42-b48f-20109e20e76f-scripts\") pod \"swift-ring-rebalance-8flfm\" (UID: \"d385dbd2-4908-4c42-b48f-20109e20e76f\") " pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.114569 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d385dbd2-4908-4c42-b48f-20109e20e76f-etc-swift\") pod \"swift-ring-rebalance-8flfm\" (UID: \"d385dbd2-4908-4c42-b48f-20109e20e76f\") " pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.114623 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkmr2\" (UniqueName: \"kubernetes.io/projected/d385dbd2-4908-4c42-b48f-20109e20e76f-kube-api-access-bkmr2\") pod \"swift-ring-rebalance-8flfm\" (UID: \"d385dbd2-4908-4c42-b48f-20109e20e76f\") " pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.218531 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d385dbd2-4908-4c42-b48f-20109e20e76f-scripts\") pod \"swift-ring-rebalance-8flfm\" (UID: \"d385dbd2-4908-4c42-b48f-20109e20e76f\") " pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.220046 4956 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d385dbd2-4908-4c42-b48f-20109e20e76f-scripts\") pod \"swift-ring-rebalance-8flfm\" (UID: \"d385dbd2-4908-4c42-b48f-20109e20e76f\") " pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.221124 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d385dbd2-4908-4c42-b48f-20109e20e76f-etc-swift\") pod \"swift-ring-rebalance-8flfm\" (UID: \"d385dbd2-4908-4c42-b48f-20109e20e76f\") " pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.221254 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkmr2\" (UniqueName: \"kubernetes.io/projected/d385dbd2-4908-4c42-b48f-20109e20e76f-kube-api-access-bkmr2\") pod \"swift-ring-rebalance-8flfm\" (UID: \"d385dbd2-4908-4c42-b48f-20109e20e76f\") " pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.221356 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d385dbd2-4908-4c42-b48f-20109e20e76f-swiftconf\") pod \"swift-ring-rebalance-8flfm\" (UID: \"d385dbd2-4908-4c42-b48f-20109e20e76f\") " pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.221376 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d385dbd2-4908-4c42-b48f-20109e20e76f-dispersionconf\") pod \"swift-ring-rebalance-8flfm\" (UID: \"d385dbd2-4908-4c42-b48f-20109e20e76f\") " pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.221410 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d385dbd2-4908-4c42-b48f-20109e20e76f-ring-data-devices\") pod \"swift-ring-rebalance-8flfm\" (UID: \"d385dbd2-4908-4c42-b48f-20109e20e76f\") " pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.222410 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d385dbd2-4908-4c42-b48f-20109e20e76f-ring-data-devices\") pod \"swift-ring-rebalance-8flfm\" (UID: \"d385dbd2-4908-4c42-b48f-20109e20e76f\") " pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.264287 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d385dbd2-4908-4c42-b48f-20109e20e76f-etc-swift\") pod \"swift-ring-rebalance-8flfm\" (UID: \"d385dbd2-4908-4c42-b48f-20109e20e76f\") " pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.271997 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d385dbd2-4908-4c42-b48f-20109e20e76f-dispersionconf\") pod \"swift-ring-rebalance-8flfm\" (UID: \"d385dbd2-4908-4c42-b48f-20109e20e76f\") " pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.275494 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: 
\"kubernetes.io/secret/d385dbd2-4908-4c42-b48f-20109e20e76f-swiftconf\") pod \"swift-ring-rebalance-8flfm\" (UID: \"d385dbd2-4908-4c42-b48f-20109e20e76f\") " pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.278428 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkmr2\" (UniqueName: \"kubernetes.io/projected/d385dbd2-4908-4c42-b48f-20109e20e76f-kube-api-access-bkmr2\") pod \"swift-ring-rebalance-8flfm\" (UID: \"d385dbd2-4908-4c42-b48f-20109e20e76f\") " pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.438517 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-1"] Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.453134 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49"] Dec 11 22:19:17 crc kubenswrapper[4956]: W1211 22:19:17.463220 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c88182b_3428_4ab0_8a8c_939487bcc292.slice/crio-c50c271fb94911e388e1b94db9dca12509ed2ac218ccb0b918885a3d059e0f4c WatchSource:0}: Error finding container c50c271fb94911e388e1b94db9dca12509ed2ac218ccb0b918885a3d059e0f4c: Status 404 returned error can't find the container with id c50c271fb94911e388e1b94db9dca12509ed2ac218ccb0b918885a3d059e0f4c Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.554441 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.569865 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-2"] Dec 11 22:19:17 crc kubenswrapper[4956]: W1211 22:19:17.582436 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod20a57394_ad58_443c_9c3b_4ad74b00cf66.slice/crio-2492f8059ffdbd091937c604199423c23ce1aa39c30ef47e03706def3ab1e797 WatchSource:0}: Error finding container 2492f8059ffdbd091937c604199423c23ce1aa39c30ef47e03706def3ab1e797: Status 404 returned error can't find the container with id 2492f8059ffdbd091937c604199423c23ce1aa39c30ef47e03706def3ab1e797 Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.631630 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49" event={"ID":"5c88182b-3428-4ab0-8a8c-939487bcc292","Type":"ContainerStarted","Data":"c50c271fb94911e388e1b94db9dca12509ed2ac218ccb0b918885a3d059e0f4c"} Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.633037 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2","Type":"ContainerStarted","Data":"866cec93ab32d9670c344123c425824ff40384504b248dc15a0ed2a4da758bec"} Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.634130 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"20a57394-ad58-443c-9c3b-4ad74b00cf66","Type":"ContainerStarted","Data":"2492f8059ffdbd091937c604199423c23ce1aa39c30ef47e03706def3ab1e797"} Dec 11 22:19:17 crc kubenswrapper[4956]: I1211 22:19:17.836106 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-8flfm"] Dec 11 22:19:17 crc kubenswrapper[4956]: W1211 22:19:17.852167 4956 
manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd385dbd2_4908_4c42_b48f_20109e20e76f.slice/crio-8fa71df6b213cebe814146e104b654727e1e14de6af82e14f144fc502d622839 WatchSource:0}: Error finding container 8fa71df6b213cebe814146e104b654727e1e14de6af82e14f144fc502d622839: Status 404 returned error can't find the container with id 8fa71df6b213cebe814146e104b654727e1e14de6af82e14f144fc502d622839 Dec 11 22:19:18 crc kubenswrapper[4956]: I1211 22:19:18.082303 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66c28ddd-265d-4b2b-becd-450ee962da58" path="/var/lib/kubelet/pods/66c28ddd-265d-4b2b-becd-450ee962da58/volumes" Dec 11 22:19:18 crc kubenswrapper[4956]: I1211 22:19:18.679542 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2","Type":"ContainerStarted","Data":"e43d4e5ed240eea3514ab0f89d0880f8a10d355833a28c1b1ecee6bddcfb901b"} Dec 11 22:19:18 crc kubenswrapper[4956]: I1211 22:19:18.679854 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2","Type":"ContainerStarted","Data":"618c481a3e4223e156f2cec38d5123d31280fbdb063112e70fbef85b8627de9d"} Dec 11 22:19:18 crc kubenswrapper[4956]: I1211 22:19:18.682761 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49" event={"ID":"5c88182b-3428-4ab0-8a8c-939487bcc292","Type":"ContainerStarted","Data":"c96ce3e7b7dc73ad685568f54d29b0aa073a0f80bd0d0a6ec17ada5c12ebe270"} Dec 11 22:19:18 crc kubenswrapper[4956]: I1211 22:19:18.682831 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49" event={"ID":"5c88182b-3428-4ab0-8a8c-939487bcc292","Type":"ContainerStarted","Data":"693b08aaeb1d0a0b531a407606ca4b0f3510cf153e83f0ebed94580038c09295"} Dec 11 22:19:18 crc kubenswrapper[4956]: I1211 22:19:18.683743 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49" Dec 11 22:19:18 crc kubenswrapper[4956]: I1211 22:19:18.683870 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49" Dec 11 22:19:18 crc kubenswrapper[4956]: I1211 22:19:18.691020 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" event={"ID":"d385dbd2-4908-4c42-b48f-20109e20e76f","Type":"ContainerStarted","Data":"88f8b2fef8e39181f62ad1f59011402d72035a524b9ad5eb0e0101ccb984503c"} Dec 11 22:19:18 crc kubenswrapper[4956]: I1211 22:19:18.691050 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" event={"ID":"d385dbd2-4908-4c42-b48f-20109e20e76f","Type":"ContainerStarted","Data":"8fa71df6b213cebe814146e104b654727e1e14de6af82e14f144fc502d622839"} Dec 11 22:19:18 crc kubenswrapper[4956]: I1211 22:19:18.695815 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"20a57394-ad58-443c-9c3b-4ad74b00cf66","Type":"ContainerStarted","Data":"188ba066b0873b2195706136f5671ba0aa84363fceb29f40a7d591ee70b8f759"} Dec 11 22:19:18 crc kubenswrapper[4956]: I1211 22:19:18.695867 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" 
event={"ID":"20a57394-ad58-443c-9c3b-4ad74b00cf66","Type":"ContainerStarted","Data":"2a28544ba2e3feb67d4b6cf03d610a7710b597783b82761421c146a8855e5f04"} Dec 11 22:19:18 crc kubenswrapper[4956]: I1211 22:19:18.707858 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49" podStartSLOduration=5.707835646 podStartE2EDuration="5.707835646s" podCreationTimestamp="2025-12-11 22:19:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 22:19:18.702662856 +0000 UTC m=+1851.147041006" watchObservedRunningTime="2025-12-11 22:19:18.707835646 +0000 UTC m=+1851.152213796" Dec 11 22:19:18 crc kubenswrapper[4956]: I1211 22:19:18.739639 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" podStartSLOduration=2.739619272 podStartE2EDuration="2.739619272s" podCreationTimestamp="2025-12-11 22:19:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 22:19:18.721731411 +0000 UTC m=+1851.166109581" watchObservedRunningTime="2025-12-11 22:19:18.739619272 +0000 UTC m=+1851.183997422" Dec 11 22:19:19 crc kubenswrapper[4956]: I1211 22:19:19.705452 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"20a57394-ad58-443c-9c3b-4ad74b00cf66","Type":"ContainerStarted","Data":"bcb7b1837ace08684314c6d8226919580bca4fcafebf1a305c4d98f538acf278"} Dec 11 22:19:19 crc kubenswrapper[4956]: I1211 22:19:19.705796 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"20a57394-ad58-443c-9c3b-4ad74b00cf66","Type":"ContainerStarted","Data":"78995d5607bfdb5a2023ba064ac48fcfaf7bd2a6b7ff7c60808eef24d792bd57"} Dec 11 22:19:19 crc kubenswrapper[4956]: I1211 22:19:19.705807 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"20a57394-ad58-443c-9c3b-4ad74b00cf66","Type":"ContainerStarted","Data":"9bd4903812f0feee220c211b3bf12632ddd18756006705687e41233ddba57dc3"} Dec 11 22:19:19 crc kubenswrapper[4956]: I1211 22:19:19.707882 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2","Type":"ContainerStarted","Data":"cccd281c5b973f44e9952c61b214421aa76db3c5edaca1b8c93bdc5a8f5e1f68"} Dec 11 22:19:19 crc kubenswrapper[4956]: I1211 22:19:19.707997 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2","Type":"ContainerStarted","Data":"ae08fbfdfeaf711654f779892877cd2764bfd88ee924fea2a36aef790d6d455b"} Dec 11 22:19:19 crc kubenswrapper[4956]: I1211 22:19:19.708014 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2","Type":"ContainerStarted","Data":"ab20f1b01d73e8917ff71c0d1f0a6e0049a3ec54934b5a8f413fb630947fb3b8"} Dec 11 22:19:20 crc kubenswrapper[4956]: I1211 22:19:20.719125 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2","Type":"ContainerStarted","Data":"6e2f8375a283d5616620bd6ef73e52b986c16c29af31a160ff5aa35528c55709"} Dec 11 22:19:20 crc kubenswrapper[4956]: I1211 22:19:20.719190 4956 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2","Type":"ContainerStarted","Data":"3ccee2cf7402dce039dad2ab88e08b93d1b36d2b67f7afc23dcf9df365a79cd5"} Dec 11 22:19:20 crc kubenswrapper[4956]: I1211 22:19:20.719209 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2","Type":"ContainerStarted","Data":"49063c69d153a94c5c9485a12e8778e38fe869f4ffe9c077e3acb67375efe6d3"} Dec 11 22:19:20 crc kubenswrapper[4956]: I1211 22:19:20.723824 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"20a57394-ad58-443c-9c3b-4ad74b00cf66","Type":"ContainerStarted","Data":"51c1ba54672ba97fae3415f44c35812c3ecdc056de9ffa633f0cd8331d77b697"} Dec 11 22:19:20 crc kubenswrapper[4956]: I1211 22:19:20.723886 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"20a57394-ad58-443c-9c3b-4ad74b00cf66","Type":"ContainerStarted","Data":"17d894b9f3dff531cbc580f7926d90f682b197fcd2f8716bad193a1838919cdc"} Dec 11 22:19:20 crc kubenswrapper[4956]: I1211 22:19:20.723905 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"20a57394-ad58-443c-9c3b-4ad74b00cf66","Type":"ContainerStarted","Data":"2a8c3c5af176a1a62abebcf5f3c3d8033fb9a57146d80a0e49e1ab251c205fd9"} Dec 11 22:19:21 crc kubenswrapper[4956]: I1211 22:19:21.747260 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"20a57394-ad58-443c-9c3b-4ad74b00cf66","Type":"ContainerStarted","Data":"75d135bab10056b401aafcf01247877e24e86c06338f132ca8e60c6046e02f1a"} Dec 11 22:19:21 crc kubenswrapper[4956]: I1211 22:19:21.747603 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"20a57394-ad58-443c-9c3b-4ad74b00cf66","Type":"ContainerStarted","Data":"06c2098158465ca32a3fcc53b825014c0ef5db71183073854d925c09788b73d8"} Dec 11 22:19:21 crc kubenswrapper[4956]: I1211 22:19:21.747616 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"20a57394-ad58-443c-9c3b-4ad74b00cf66","Type":"ContainerStarted","Data":"5da5a3a107038dd0a8c551f0f84141f195e90e835b10781aa8057f067ffd23f2"} Dec 11 22:19:21 crc kubenswrapper[4956]: I1211 22:19:21.773268 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2","Type":"ContainerStarted","Data":"eab1c91e3f9b3c6688ce8839d7eeedbca5359d9814f262f1605a3d90db48e212"} Dec 11 22:19:21 crc kubenswrapper[4956]: I1211 22:19:21.773327 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2","Type":"ContainerStarted","Data":"65aae566e3b07617e9e02df8bd1b43c58ef08d4593000062f51e243e0e5391db"} Dec 11 22:19:21 crc kubenswrapper[4956]: I1211 22:19:21.773336 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2","Type":"ContainerStarted","Data":"cd94983770dc116a3a9d7e00f168621b90cfab7b8ee9cc90312c9fbe0af14fa8"} Dec 11 22:19:22 crc kubenswrapper[4956]: I1211 22:19:22.789988 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" 
event={"ID":"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2","Type":"ContainerStarted","Data":"e8492bdf3c3bbd4132aa6e49e1dc49ae36d5a73c57b44dfd652744e8ed027cb0"} Dec 11 22:19:22 crc kubenswrapper[4956]: I1211 22:19:22.795697 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"20a57394-ad58-443c-9c3b-4ad74b00cf66","Type":"ContainerStarted","Data":"6d615f27edb7203851f02088b974a7d13858f2040c4eea0cb9d3ab1f5fd78044"} Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.612147 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49" Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.612824 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/swift-proxy-7d4fb88647-dsz49" Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.808325 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"20a57394-ad58-443c-9c3b-4ad74b00cf66","Type":"ContainerStarted","Data":"6df67e0e915c46f5c224f98f9b05a4e9cdcdd9e6e252dc43ce64f6dff891714b"} Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.808402 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"20a57394-ad58-443c-9c3b-4ad74b00cf66","Type":"ContainerStarted","Data":"54fd57eda940c11367bdc598f8f504a641575e244cdeb81634f930f86c7acfef"} Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.808416 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"20a57394-ad58-443c-9c3b-4ad74b00cf66","Type":"ContainerStarted","Data":"335635bfc4af79102b68486eacc43c5fc15fb87b7935dcf8cfd55bcb05a1f406"} Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.822910 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2","Type":"ContainerStarted","Data":"3f9c0081df758ae77f5f101922993d86333e36df7721d1203119f8043d666e2a"} Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.822964 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2","Type":"ContainerStarted","Data":"1a0c939f625365a350623d97d32539f9b2e2124b522ba4b9a936c9a3c740c451"} Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.822980 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"6d2691c4-da4b-45cc-9fd4-13c002eb3dd2","Type":"ContainerStarted","Data":"d96cb1dff7dd21561d3226de2862eb73ed8ca230e7015c2db03856af694a318b"} Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.853741 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-storage-2" podStartSLOduration=12.853722156 podStartE2EDuration="12.853722156s" podCreationTimestamp="2025-12-11 22:19:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 22:19:23.85015524 +0000 UTC m=+1856.294533410" watchObservedRunningTime="2025-12-11 22:19:23.853722156 +0000 UTC m=+1856.298100306" Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.900032 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-storage-1" podStartSLOduration=12.900012554 podStartE2EDuration="12.900012554s" podCreationTimestamp="2025-12-11 22:19:11 
+0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 22:19:23.893033696 +0000 UTC m=+1856.337411856" watchObservedRunningTime="2025-12-11 22:19:23.900012554 +0000 UTC m=+1856.344390704" Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.913966 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.914476 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="account-server" containerID="cri-o://bc4ce11530db4d7aa94b181d0ac5bbe41016bb36bc3b81f2ef8ed18300920c2f" gracePeriod=30 Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.914507 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="object-server" containerID="cri-o://2124c7e81a8406a24c2f606c393966b55e66c61c9149af1054091a92d6754f7e" gracePeriod=30 Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.914516 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="container-sharder" containerID="cri-o://a0e4baf33050ecc4d04e84bbd15bea1f4382c51edfca0492260184b2d36edb96" gracePeriod=30 Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.914595 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="swift-recon-cron" containerID="cri-o://46ca53a46b51666e802ebdb0db610c33c39710bab87681373f308a5de47cd1cc" gracePeriod=30 Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.914587 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="container-auditor" containerID="cri-o://5240ec284acdffff31ad3df9116325845d3f936d9b94892edd27480f7eda5d43" gracePeriod=30 Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.914643 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="rsync" containerID="cri-o://818eb5d4ca9845544fd20579d0975240b833028bfcf4cf6cb2f54eddf77d08ff" gracePeriod=30 Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.914655 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="container-updater" containerID="cri-o://0df5dfbf514fc82c9b20389bee8122a1b54f5466d75e11570c5295139ccae9e6" gracePeriod=30 Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.914678 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="object-expirer" containerID="cri-o://e0eb0453deda9b30858bd0dafd5d97dacda1037dc916bbee7ba68709bd4cce5e" gracePeriod=30 Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.914614 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="account-replicator" 
containerID="cri-o://f42b34da00640445d7f41f37a60789ee942e2b56e2f1d843f6f597c3bd680064" gracePeriod=30 Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.914704 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="object-replicator" containerID="cri-o://0bf8628f6db21e853b47a9127d07107df97ff5cb36f70ff0f9053e9b4a29abbf" gracePeriod=30 Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.914727 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="object-updater" containerID="cri-o://bfc1c3080b6a427dbb30b3c3e2a54714de087157c54e925845c077410dc06012" gracePeriod=30 Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.914719 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="account-reaper" containerID="cri-o://531915597e5bfc8894bd3c4cdbd292cc8c16328e98d860fae953763f937f355b" gracePeriod=30 Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.914583 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="container-replicator" containerID="cri-o://31b7e3d8beed14cdcd332ad70f7932dac013362e417886c5989aa65e797ccd34" gracePeriod=30 Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.914666 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="account-auditor" containerID="cri-o://03785274dd06bb71bd539ea0191ea87bffb303056e8ff08dcc27a86d6d20cde7" gracePeriod=30 Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.914740 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="object-auditor" containerID="cri-o://6c529247cb79b86c8840972bdb2535e99c7acfb5ca83c4e7c419b12af81643bb" gracePeriod=30 Dec 11 22:19:23 crc kubenswrapper[4956]: I1211 22:19:23.919538 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="container-server" containerID="cri-o://fa26039f3dc2afc660aeb61b4108bd16e9f7994d916f3b78a8c8b3206e17ffc6" gracePeriod=30 Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.020972 4956 scope.go:117] "RemoveContainer" containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" Dec 11 22:19:24 crc kubenswrapper[4956]: E1211 22:19:24.021260 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.853680 4956 generic.go:334] "Generic (PLEG): container finished" podID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerID="a0e4baf33050ecc4d04e84bbd15bea1f4382c51edfca0492260184b2d36edb96" exitCode=0 Dec 11 
22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.853713 4956 generic.go:334] "Generic (PLEG): container finished" podID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerID="818eb5d4ca9845544fd20579d0975240b833028bfcf4cf6cb2f54eddf77d08ff" exitCode=0 Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.853724 4956 generic.go:334] "Generic (PLEG): container finished" podID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerID="e0eb0453deda9b30858bd0dafd5d97dacda1037dc916bbee7ba68709bd4cce5e" exitCode=0 Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.853731 4956 generic.go:334] "Generic (PLEG): container finished" podID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerID="bfc1c3080b6a427dbb30b3c3e2a54714de087157c54e925845c077410dc06012" exitCode=0 Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.853741 4956 generic.go:334] "Generic (PLEG): container finished" podID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerID="6c529247cb79b86c8840972bdb2535e99c7acfb5ca83c4e7c419b12af81643bb" exitCode=0 Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.853750 4956 generic.go:334] "Generic (PLEG): container finished" podID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerID="0bf8628f6db21e853b47a9127d07107df97ff5cb36f70ff0f9053e9b4a29abbf" exitCode=0 Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.853760 4956 generic.go:334] "Generic (PLEG): container finished" podID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerID="2124c7e81a8406a24c2f606c393966b55e66c61c9149af1054091a92d6754f7e" exitCode=0 Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.853755 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerDied","Data":"a0e4baf33050ecc4d04e84bbd15bea1f4382c51edfca0492260184b2d36edb96"} Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.853843 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerDied","Data":"818eb5d4ca9845544fd20579d0975240b833028bfcf4cf6cb2f54eddf77d08ff"} Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.853861 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerDied","Data":"e0eb0453deda9b30858bd0dafd5d97dacda1037dc916bbee7ba68709bd4cce5e"} Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.853873 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerDied","Data":"bfc1c3080b6a427dbb30b3c3e2a54714de087157c54e925845c077410dc06012"} Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.853888 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerDied","Data":"6c529247cb79b86c8840972bdb2535e99c7acfb5ca83c4e7c419b12af81643bb"} Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.853901 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerDied","Data":"0bf8628f6db21e853b47a9127d07107df97ff5cb36f70ff0f9053e9b4a29abbf"} Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.853914 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" 
event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerDied","Data":"2124c7e81a8406a24c2f606c393966b55e66c61c9149af1054091a92d6754f7e"} Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.853930 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerDied","Data":"0df5dfbf514fc82c9b20389bee8122a1b54f5466d75e11570c5295139ccae9e6"} Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.853788 4956 generic.go:334] "Generic (PLEG): container finished" podID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerID="0df5dfbf514fc82c9b20389bee8122a1b54f5466d75e11570c5295139ccae9e6" exitCode=0 Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.853957 4956 generic.go:334] "Generic (PLEG): container finished" podID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerID="5240ec284acdffff31ad3df9116325845d3f936d9b94892edd27480f7eda5d43" exitCode=0 Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.853975 4956 generic.go:334] "Generic (PLEG): container finished" podID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerID="31b7e3d8beed14cdcd332ad70f7932dac013362e417886c5989aa65e797ccd34" exitCode=0 Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.853986 4956 generic.go:334] "Generic (PLEG): container finished" podID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerID="fa26039f3dc2afc660aeb61b4108bd16e9f7994d916f3b78a8c8b3206e17ffc6" exitCode=0 Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.853996 4956 generic.go:334] "Generic (PLEG): container finished" podID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerID="531915597e5bfc8894bd3c4cdbd292cc8c16328e98d860fae953763f937f355b" exitCode=0 Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.854007 4956 generic.go:334] "Generic (PLEG): container finished" podID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerID="03785274dd06bb71bd539ea0191ea87bffb303056e8ff08dcc27a86d6d20cde7" exitCode=0 Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.854016 4956 generic.go:334] "Generic (PLEG): container finished" podID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerID="f42b34da00640445d7f41f37a60789ee942e2b56e2f1d843f6f597c3bd680064" exitCode=0 Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.854025 4956 generic.go:334] "Generic (PLEG): container finished" podID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerID="bc4ce11530db4d7aa94b181d0ac5bbe41016bb36bc3b81f2ef8ed18300920c2f" exitCode=0 Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.854103 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerDied","Data":"5240ec284acdffff31ad3df9116325845d3f936d9b94892edd27480f7eda5d43"} Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.854153 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerDied","Data":"31b7e3d8beed14cdcd332ad70f7932dac013362e417886c5989aa65e797ccd34"} Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.854168 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerDied","Data":"fa26039f3dc2afc660aeb61b4108bd16e9f7994d916f3b78a8c8b3206e17ffc6"} Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.854181 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerDied","Data":"531915597e5bfc8894bd3c4cdbd292cc8c16328e98d860fae953763f937f355b"} Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.854198 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerDied","Data":"03785274dd06bb71bd539ea0191ea87bffb303056e8ff08dcc27a86d6d20cde7"} Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.854211 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerDied","Data":"f42b34da00640445d7f41f37a60789ee942e2b56e2f1d843f6f597c3bd680064"} Dec 11 22:19:24 crc kubenswrapper[4956]: I1211 22:19:24.854224 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerDied","Data":"bc4ce11530db4d7aa94b181d0ac5bbe41016bb36bc3b81f2ef8ed18300920c2f"} Dec 11 22:19:28 crc kubenswrapper[4956]: I1211 22:19:28.886532 4956 generic.go:334] "Generic (PLEG): container finished" podID="d385dbd2-4908-4c42-b48f-20109e20e76f" containerID="88f8b2fef8e39181f62ad1f59011402d72035a524b9ad5eb0e0101ccb984503c" exitCode=0 Dec 11 22:19:28 crc kubenswrapper[4956]: I1211 22:19:28.886653 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" event={"ID":"d385dbd2-4908-4c42-b48f-20109e20e76f","Type":"ContainerDied","Data":"88f8b2fef8e39181f62ad1f59011402d72035a524b9ad5eb0e0101ccb984503c"} Dec 11 22:19:30 crc kubenswrapper[4956]: I1211 22:19:30.169382 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" Dec 11 22:19:30 crc kubenswrapper[4956]: I1211 22:19:30.439520 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d385dbd2-4908-4c42-b48f-20109e20e76f-swiftconf\") pod \"d385dbd2-4908-4c42-b48f-20109e20e76f\" (UID: \"d385dbd2-4908-4c42-b48f-20109e20e76f\") " Dec 11 22:19:30 crc kubenswrapper[4956]: I1211 22:19:30.439661 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d385dbd2-4908-4c42-b48f-20109e20e76f-dispersionconf\") pod \"d385dbd2-4908-4c42-b48f-20109e20e76f\" (UID: \"d385dbd2-4908-4c42-b48f-20109e20e76f\") " Dec 11 22:19:30 crc kubenswrapper[4956]: I1211 22:19:30.439703 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bkmr2\" (UniqueName: \"kubernetes.io/projected/d385dbd2-4908-4c42-b48f-20109e20e76f-kube-api-access-bkmr2\") pod \"d385dbd2-4908-4c42-b48f-20109e20e76f\" (UID: \"d385dbd2-4908-4c42-b48f-20109e20e76f\") " Dec 11 22:19:30 crc kubenswrapper[4956]: I1211 22:19:30.439729 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d385dbd2-4908-4c42-b48f-20109e20e76f-scripts\") pod \"d385dbd2-4908-4c42-b48f-20109e20e76f\" (UID: \"d385dbd2-4908-4c42-b48f-20109e20e76f\") " Dec 11 22:19:30 crc kubenswrapper[4956]: I1211 22:19:30.439760 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d385dbd2-4908-4c42-b48f-20109e20e76f-etc-swift\") pod \"d385dbd2-4908-4c42-b48f-20109e20e76f\" (UID: \"d385dbd2-4908-4c42-b48f-20109e20e76f\") " Dec 11 22:19:30 crc kubenswrapper[4956]: I1211 22:19:30.439792 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d385dbd2-4908-4c42-b48f-20109e20e76f-ring-data-devices\") pod \"d385dbd2-4908-4c42-b48f-20109e20e76f\" (UID: \"d385dbd2-4908-4c42-b48f-20109e20e76f\") " Dec 11 22:19:30 crc kubenswrapper[4956]: I1211 22:19:30.440533 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d385dbd2-4908-4c42-b48f-20109e20e76f-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "d385dbd2-4908-4c42-b48f-20109e20e76f" (UID: "d385dbd2-4908-4c42-b48f-20109e20e76f"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 22:19:30 crc kubenswrapper[4956]: I1211 22:19:30.440880 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d385dbd2-4908-4c42-b48f-20109e20e76f-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "d385dbd2-4908-4c42-b48f-20109e20e76f" (UID: "d385dbd2-4908-4c42-b48f-20109e20e76f"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:19:30 crc kubenswrapper[4956]: I1211 22:19:30.448697 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d385dbd2-4908-4c42-b48f-20109e20e76f-kube-api-access-bkmr2" (OuterVolumeSpecName: "kube-api-access-bkmr2") pod "d385dbd2-4908-4c42-b48f-20109e20e76f" (UID: "d385dbd2-4908-4c42-b48f-20109e20e76f"). InnerVolumeSpecName "kube-api-access-bkmr2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:19:30 crc kubenswrapper[4956]: I1211 22:19:30.459408 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d385dbd2-4908-4c42-b48f-20109e20e76f-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "d385dbd2-4908-4c42-b48f-20109e20e76f" (UID: "d385dbd2-4908-4c42-b48f-20109e20e76f"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 22:19:30 crc kubenswrapper[4956]: I1211 22:19:30.459932 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d385dbd2-4908-4c42-b48f-20109e20e76f-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "d385dbd2-4908-4c42-b48f-20109e20e76f" (UID: "d385dbd2-4908-4c42-b48f-20109e20e76f"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 22:19:30 crc kubenswrapper[4956]: I1211 22:19:30.462145 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d385dbd2-4908-4c42-b48f-20109e20e76f-scripts" (OuterVolumeSpecName: "scripts") pod "d385dbd2-4908-4c42-b48f-20109e20e76f" (UID: "d385dbd2-4908-4c42-b48f-20109e20e76f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 22:19:30 crc kubenswrapper[4956]: I1211 22:19:30.541442 4956 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d385dbd2-4908-4c42-b48f-20109e20e76f-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 11 22:19:30 crc kubenswrapper[4956]: I1211 22:19:30.541487 4956 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d385dbd2-4908-4c42-b48f-20109e20e76f-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 11 22:19:30 crc kubenswrapper[4956]: I1211 22:19:30.541516 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bkmr2\" (UniqueName: \"kubernetes.io/projected/d385dbd2-4908-4c42-b48f-20109e20e76f-kube-api-access-bkmr2\") on node \"crc\" DevicePath \"\"" Dec 11 22:19:30 crc kubenswrapper[4956]: I1211 22:19:30.541530 4956 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d385dbd2-4908-4c42-b48f-20109e20e76f-scripts\") on node \"crc\" DevicePath \"\"" Dec 11 22:19:30 crc kubenswrapper[4956]: I1211 22:19:30.541542 4956 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d385dbd2-4908-4c42-b48f-20109e20e76f-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 11 22:19:30 crc kubenswrapper[4956]: I1211 22:19:30.541553 4956 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d385dbd2-4908-4c42-b48f-20109e20e76f-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 11 22:19:30 crc kubenswrapper[4956]: I1211 22:19:30.908193 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" event={"ID":"d385dbd2-4908-4c42-b48f-20109e20e76f","Type":"ContainerDied","Data":"8fa71df6b213cebe814146e104b654727e1e14de6af82e14f144fc502d622839"} Dec 11 22:19:30 crc kubenswrapper[4956]: I1211 22:19:30.908254 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-8flfm" Dec 11 22:19:30 crc kubenswrapper[4956]: I1211 22:19:30.908263 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8fa71df6b213cebe814146e104b654727e1e14de6af82e14f144fc502d622839" Dec 11 22:19:34 crc kubenswrapper[4956]: I1211 22:19:34.826939 4956 scope.go:117] "RemoveContainer" containerID="b1074594adcc4a01dce7038a77f03a4c85373c8f92c8698a64bfc08926ae86b9" Dec 11 22:19:38 crc kubenswrapper[4956]: I1211 22:19:38.029735 4956 scope.go:117] "RemoveContainer" containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" Dec 11 22:19:38 crc kubenswrapper[4956]: E1211 22:19:38.030580 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:19:51 crc kubenswrapper[4956]: I1211 22:19:51.021724 4956 scope.go:117] "RemoveContainer" containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" Dec 11 22:19:51 crc kubenswrapper[4956]: E1211 22:19:51.023074 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:19:54 crc kubenswrapper[4956]: I1211 22:19:54.127935 4956 generic.go:334] "Generic (PLEG): container finished" podID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerID="46ca53a46b51666e802ebdb0db610c33c39710bab87681373f308a5de47cd1cc" exitCode=137 Dec 11 22:19:54 crc kubenswrapper[4956]: I1211 22:19:54.128009 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerDied","Data":"46ca53a46b51666e802ebdb0db610c33c39710bab87681373f308a5de47cd1cc"} Dec 11 22:19:54 crc kubenswrapper[4956]: I1211 22:19:54.276104 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:19:54 crc kubenswrapper[4956]: I1211 22:19:54.299572 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-etc-swift\") pod \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") " Dec 11 22:19:54 crc kubenswrapper[4956]: I1211 22:19:54.299655 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") " Dec 11 22:19:54 crc kubenswrapper[4956]: I1211 22:19:54.299707 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ffwk\" (UniqueName: \"kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-kube-api-access-7ffwk\") pod \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") " Dec 11 22:19:54 crc kubenswrapper[4956]: I1211 22:19:54.299807 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-lock\") pod \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") " Dec 11 22:19:54 crc kubenswrapper[4956]: I1211 22:19:54.299840 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-cache\") pod \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\" (UID: \"1844e361-ee35-4c2f-8bc6-8ddd5ada5445\") " Dec 11 22:19:54 crc kubenswrapper[4956]: I1211 22:19:54.301190 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-lock" (OuterVolumeSpecName: "lock") pod "1844e361-ee35-4c2f-8bc6-8ddd5ada5445" (UID: "1844e361-ee35-4c2f-8bc6-8ddd5ada5445"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:19:54 crc kubenswrapper[4956]: I1211 22:19:54.301353 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-cache" (OuterVolumeSpecName: "cache") pod "1844e361-ee35-4c2f-8bc6-8ddd5ada5445" (UID: "1844e361-ee35-4c2f-8bc6-8ddd5ada5445"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:19:54 crc kubenswrapper[4956]: I1211 22:19:54.305990 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-kube-api-access-7ffwk" (OuterVolumeSpecName: "kube-api-access-7ffwk") pod "1844e361-ee35-4c2f-8bc6-8ddd5ada5445" (UID: "1844e361-ee35-4c2f-8bc6-8ddd5ada5445"). InnerVolumeSpecName "kube-api-access-7ffwk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:19:54 crc kubenswrapper[4956]: I1211 22:19:54.306000 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "swift") pod "1844e361-ee35-4c2f-8bc6-8ddd5ada5445" (UID: "1844e361-ee35-4c2f-8bc6-8ddd5ada5445"). InnerVolumeSpecName "local-storage08-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 11 22:19:54 crc kubenswrapper[4956]: I1211 22:19:54.307048 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "1844e361-ee35-4c2f-8bc6-8ddd5ada5445" (UID: "1844e361-ee35-4c2f-8bc6-8ddd5ada5445"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:19:54 crc kubenswrapper[4956]: I1211 22:19:54.402179 4956 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 11 22:19:54 crc kubenswrapper[4956]: I1211 22:19:54.402243 4956 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Dec 11 22:19:54 crc kubenswrapper[4956]: I1211 22:19:54.402258 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ffwk\" (UniqueName: \"kubernetes.io/projected/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-kube-api-access-7ffwk\") on node \"crc\" DevicePath \"\"" Dec 11 22:19:54 crc kubenswrapper[4956]: I1211 22:19:54.402272 4956 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-lock\") on node \"crc\" DevicePath \"\"" Dec 11 22:19:54 crc kubenswrapper[4956]: I1211 22:19:54.402284 4956 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/1844e361-ee35-4c2f-8bc6-8ddd5ada5445-cache\") on node \"crc\" DevicePath \"\"" Dec 11 22:19:54 crc kubenswrapper[4956]: I1211 22:19:54.415944 4956 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Dec 11 22:19:54 crc kubenswrapper[4956]: I1211 22:19:54.503268 4956 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081036 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-q85sv"] Dec 11 22:19:55 crc kubenswrapper[4956]: E1211 22:19:55.081390 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="container-updater" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081409 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="container-updater" Dec 11 22:19:55 crc kubenswrapper[4956]: E1211 22:19:55.081421 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="account-replicator" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081428 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="account-replicator" Dec 11 22:19:55 crc kubenswrapper[4956]: E1211 22:19:55.081439 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="rsync" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081447 4956 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="rsync" Dec 11 22:19:55 crc kubenswrapper[4956]: E1211 22:19:55.081464 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="object-server" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081471 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="object-server" Dec 11 22:19:55 crc kubenswrapper[4956]: E1211 22:19:55.081483 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="account-auditor" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081490 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="account-auditor" Dec 11 22:19:55 crc kubenswrapper[4956]: E1211 22:19:55.081503 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="account-reaper" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081510 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="account-reaper" Dec 11 22:19:55 crc kubenswrapper[4956]: E1211 22:19:55.081523 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="swift-recon-cron" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081530 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="swift-recon-cron" Dec 11 22:19:55 crc kubenswrapper[4956]: E1211 22:19:55.081544 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="container-auditor" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081551 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="container-auditor" Dec 11 22:19:55 crc kubenswrapper[4956]: E1211 22:19:55.081562 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="container-server" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081569 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="container-server" Dec 11 22:19:55 crc kubenswrapper[4956]: E1211 22:19:55.081581 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="object-auditor" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081589 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="object-auditor" Dec 11 22:19:55 crc kubenswrapper[4956]: E1211 22:19:55.081604 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="container-sharder" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081612 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="container-sharder" Dec 11 22:19:55 crc kubenswrapper[4956]: E1211 22:19:55.081625 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="object-replicator" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081632 4956 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="object-replicator" Dec 11 22:19:55 crc kubenswrapper[4956]: E1211 22:19:55.081643 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="object-updater" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081652 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="object-updater" Dec 11 22:19:55 crc kubenswrapper[4956]: E1211 22:19:55.081663 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="object-expirer" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081670 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="object-expirer" Dec 11 22:19:55 crc kubenswrapper[4956]: E1211 22:19:55.081678 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="container-replicator" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081685 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="container-replicator" Dec 11 22:19:55 crc kubenswrapper[4956]: E1211 22:19:55.081704 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="account-server" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081711 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="account-server" Dec 11 22:19:55 crc kubenswrapper[4956]: E1211 22:19:55.081721 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d385dbd2-4908-4c42-b48f-20109e20e76f" containerName="swift-ring-rebalance" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081728 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="d385dbd2-4908-4c42-b48f-20109e20e76f" containerName="swift-ring-rebalance" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081910 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="object-server" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081923 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="object-replicator" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081937 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="account-reaper" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081947 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="object-auditor" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081955 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="object-updater" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081963 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="d385dbd2-4908-4c42-b48f-20109e20e76f" containerName="swift-ring-rebalance" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081971 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="container-auditor" Dec 11 22:19:55 crc 
kubenswrapper[4956]: I1211 22:19:55.081981 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="account-server" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.081991 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="container-updater" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.082002 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="account-auditor" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.082013 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="container-replicator" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.082021 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="rsync" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.082034 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="container-server" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.082044 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="account-replicator" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.082054 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="object-expirer" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.082066 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="swift-recon-cron" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.082074 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" containerName="container-sharder" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.083509 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-q85sv" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.105614 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-q85sv"] Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.112317 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5p9qd\" (UniqueName: \"kubernetes.io/projected/6033e1f1-b8a5-446c-829f-4505a5db344c-kube-api-access-5p9qd\") pod \"redhat-operators-q85sv\" (UID: \"6033e1f1-b8a5-446c-829f-4505a5db344c\") " pod="openshift-marketplace/redhat-operators-q85sv" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.112366 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6033e1f1-b8a5-446c-829f-4505a5db344c-utilities\") pod \"redhat-operators-q85sv\" (UID: \"6033e1f1-b8a5-446c-829f-4505a5db344c\") " pod="openshift-marketplace/redhat-operators-q85sv" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.112606 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6033e1f1-b8a5-446c-829f-4505a5db344c-catalog-content\") pod \"redhat-operators-q85sv\" (UID: \"6033e1f1-b8a5-446c-829f-4505a5db344c\") " pod="openshift-marketplace/redhat-operators-q85sv" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.143289 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"1844e361-ee35-4c2f-8bc6-8ddd5ada5445","Type":"ContainerDied","Data":"8845f0a0f103d536a83d8be26e4c90b585992e6e0bc0ba2b3bb943bac0ca2e24"} Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.143341 4956 scope.go:117] "RemoveContainer" containerID="a0e4baf33050ecc4d04e84bbd15bea1f4382c51edfca0492260184b2d36edb96" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.143454 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.165814 4956 scope.go:117] "RemoveContainer" containerID="46ca53a46b51666e802ebdb0db610c33c39710bab87681373f308a5de47cd1cc" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.180085 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.186039 4956 scope.go:117] "RemoveContainer" containerID="818eb5d4ca9845544fd20579d0975240b833028bfcf4cf6cb2f54eddf77d08ff" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.186145 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.212306 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.214197 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5p9qd\" (UniqueName: \"kubernetes.io/projected/6033e1f1-b8a5-446c-829f-4505a5db344c-kube-api-access-5p9qd\") pod \"redhat-operators-q85sv\" (UID: \"6033e1f1-b8a5-446c-829f-4505a5db344c\") " pod="openshift-marketplace/redhat-operators-q85sv" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.214249 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6033e1f1-b8a5-446c-829f-4505a5db344c-utilities\") pod \"redhat-operators-q85sv\" (UID: \"6033e1f1-b8a5-446c-829f-4505a5db344c\") " pod="openshift-marketplace/redhat-operators-q85sv" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.214378 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6033e1f1-b8a5-446c-829f-4505a5db344c-catalog-content\") pod \"redhat-operators-q85sv\" (UID: \"6033e1f1-b8a5-446c-829f-4505a5db344c\") " pod="openshift-marketplace/redhat-operators-q85sv" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.214916 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6033e1f1-b8a5-446c-829f-4505a5db344c-catalog-content\") pod \"redhat-operators-q85sv\" (UID: \"6033e1f1-b8a5-446c-829f-4505a5db344c\") " pod="openshift-marketplace/redhat-operators-q85sv" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.215494 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6033e1f1-b8a5-446c-829f-4505a5db344c-utilities\") pod \"redhat-operators-q85sv\" (UID: \"6033e1f1-b8a5-446c-829f-4505a5db344c\") " pod="openshift-marketplace/redhat-operators-q85sv" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.216672 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.217934 4956 scope.go:117] "RemoveContainer" containerID="e0eb0453deda9b30858bd0dafd5d97dacda1037dc916bbee7ba68709bd4cce5e" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.236651 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5p9qd\" (UniqueName: \"kubernetes.io/projected/6033e1f1-b8a5-446c-829f-4505a5db344c-kube-api-access-5p9qd\") pod \"redhat-operators-q85sv\" (UID: \"6033e1f1-b8a5-446c-829f-4505a5db344c\") " pod="openshift-marketplace/redhat-operators-q85sv" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.239248 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.249403 4956 scope.go:117] "RemoveContainer" containerID="bfc1c3080b6a427dbb30b3c3e2a54714de087157c54e925845c077410dc06012" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.286598 4956 scope.go:117] "RemoveContainer" containerID="6c529247cb79b86c8840972bdb2535e99c7acfb5ca83c4e7c419b12af81643bb" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.305104 4956 scope.go:117] "RemoveContainer" containerID="0bf8628f6db21e853b47a9127d07107df97ff5cb36f70ff0f9053e9b4a29abbf" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.334622 4956 scope.go:117] "RemoveContainer" containerID="2124c7e81a8406a24c2f606c393966b55e66c61c9149af1054091a92d6754f7e" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.354555 4956 scope.go:117] "RemoveContainer" containerID="0df5dfbf514fc82c9b20389bee8122a1b54f5466d75e11570c5295139ccae9e6" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.377849 4956 scope.go:117] "RemoveContainer" containerID="5240ec284acdffff31ad3df9116325845d3f936d9b94892edd27480f7eda5d43" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.399092 4956 scope.go:117] "RemoveContainer" containerID="31b7e3d8beed14cdcd332ad70f7932dac013362e417886c5989aa65e797ccd34" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.400109 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-q85sv" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.418344 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4de42e9b-773b-4150-809a-c9255878e80c-etc-swift\") pod \"swift-storage-0\" (UID: \"4de42e9b-773b-4150-809a-c9255878e80c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.418399 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/4de42e9b-773b-4150-809a-c9255878e80c-cache\") pod \"swift-storage-0\" (UID: \"4de42e9b-773b-4150-809a-c9255878e80c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.418433 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/4de42e9b-773b-4150-809a-c9255878e80c-lock\") pod \"swift-storage-0\" (UID: \"4de42e9b-773b-4150-809a-c9255878e80c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.418567 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"swift-storage-0\" (UID: \"4de42e9b-773b-4150-809a-c9255878e80c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.418699 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-psvgp\" (UniqueName: \"kubernetes.io/projected/4de42e9b-773b-4150-809a-c9255878e80c-kube-api-access-psvgp\") pod \"swift-storage-0\" (UID: \"4de42e9b-773b-4150-809a-c9255878e80c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.430992 4956 scope.go:117] "RemoveContainer" containerID="fa26039f3dc2afc660aeb61b4108bd16e9f7994d916f3b78a8c8b3206e17ffc6" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.494087 4956 scope.go:117] "RemoveContainer" containerID="531915597e5bfc8894bd3c4cdbd292cc8c16328e98d860fae953763f937f355b" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.519981 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/4de42e9b-773b-4150-809a-c9255878e80c-lock\") pod \"swift-storage-0\" (UID: \"4de42e9b-773b-4150-809a-c9255878e80c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.520073 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"swift-storage-0\" (UID: \"4de42e9b-773b-4150-809a-c9255878e80c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.520121 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-psvgp\" (UniqueName: \"kubernetes.io/projected/4de42e9b-773b-4150-809a-c9255878e80c-kube-api-access-psvgp\") pod \"swift-storage-0\" (UID: \"4de42e9b-773b-4150-809a-c9255878e80c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.520171 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"etc-swift\" (UniqueName: \"kubernetes.io/projected/4de42e9b-773b-4150-809a-c9255878e80c-etc-swift\") pod \"swift-storage-0\" (UID: \"4de42e9b-773b-4150-809a-c9255878e80c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.520193 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/4de42e9b-773b-4150-809a-c9255878e80c-cache\") pod \"swift-storage-0\" (UID: \"4de42e9b-773b-4150-809a-c9255878e80c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.520458 4956 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"swift-storage-0\" (UID: \"4de42e9b-773b-4150-809a-c9255878e80c\") device mount path \"/mnt/openstack/pv08\"" pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.520639 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/4de42e9b-773b-4150-809a-c9255878e80c-lock\") pod \"swift-storage-0\" (UID: \"4de42e9b-773b-4150-809a-c9255878e80c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.520720 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/4de42e9b-773b-4150-809a-c9255878e80c-cache\") pod \"swift-storage-0\" (UID: \"4de42e9b-773b-4150-809a-c9255878e80c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.526247 4956 scope.go:117] "RemoveContainer" containerID="03785274dd06bb71bd539ea0191ea87bffb303056e8ff08dcc27a86d6d20cde7" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.531086 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4de42e9b-773b-4150-809a-c9255878e80c-etc-swift\") pod \"swift-storage-0\" (UID: \"4de42e9b-773b-4150-809a-c9255878e80c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.541814 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-psvgp\" (UniqueName: \"kubernetes.io/projected/4de42e9b-773b-4150-809a-c9255878e80c-kube-api-access-psvgp\") pod \"swift-storage-0\" (UID: \"4de42e9b-773b-4150-809a-c9255878e80c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.549945 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"swift-storage-0\" (UID: \"4de42e9b-773b-4150-809a-c9255878e80c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.591795 4956 scope.go:117] "RemoveContainer" containerID="f42b34da00640445d7f41f37a60789ee942e2b56e2f1d843f6f597c3bd680064" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.607375 4956 scope.go:117] "RemoveContainer" containerID="bc4ce11530db4d7aa94b181d0ac5bbe41016bb36bc3b81f2ef8ed18300920c2f" Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.639017 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-q85sv"] Dec 11 22:19:55 crc kubenswrapper[4956]: I1211 22:19:55.837585 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Dec 11 22:19:56 crc kubenswrapper[4956]: I1211 22:19:56.036749 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1844e361-ee35-4c2f-8bc6-8ddd5ada5445" path="/var/lib/kubelet/pods/1844e361-ee35-4c2f-8bc6-8ddd5ada5445/volumes" Dec 11 22:19:56 crc kubenswrapper[4956]: I1211 22:19:56.157147 4956 generic.go:334] "Generic (PLEG): container finished" podID="6033e1f1-b8a5-446c-829f-4505a5db344c" containerID="f4793b8639408db01588a830227c9a62357263bb2cd02f93014365ece4b1a3f4" exitCode=0 Dec 11 22:19:56 crc kubenswrapper[4956]: I1211 22:19:56.157672 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q85sv" event={"ID":"6033e1f1-b8a5-446c-829f-4505a5db344c","Type":"ContainerDied","Data":"f4793b8639408db01588a830227c9a62357263bb2cd02f93014365ece4b1a3f4"} Dec 11 22:19:56 crc kubenswrapper[4956]: I1211 22:19:56.157701 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q85sv" event={"ID":"6033e1f1-b8a5-446c-829f-4505a5db344c","Type":"ContainerStarted","Data":"4532d18d12a365cfaa678d25ba2374998b5934586f1cee6c2733d96f04bd09c3"} Dec 11 22:19:56 crc kubenswrapper[4956]: I1211 22:19:56.159198 4956 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 22:19:56 crc kubenswrapper[4956]: I1211 22:19:56.270983 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 11 22:19:56 crc kubenswrapper[4956]: W1211 22:19:56.274921 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4de42e9b_773b_4150_809a_c9255878e80c.slice/crio-b83c50645bf80e56443b00168d768b7781d40e628f288aff4cf6955b58dcbac6 WatchSource:0}: Error finding container b83c50645bf80e56443b00168d768b7781d40e628f288aff4cf6955b58dcbac6: Status 404 returned error can't find the container with id b83c50645bf80e56443b00168d768b7781d40e628f288aff4cf6955b58dcbac6 Dec 11 22:19:57 crc kubenswrapper[4956]: I1211 22:19:57.169239 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q85sv" event={"ID":"6033e1f1-b8a5-446c-829f-4505a5db344c","Type":"ContainerStarted","Data":"41576cf4a6f98e8ba14792e23529c38c86fcefca9ac05880b587963b1ed309d3"} Dec 11 22:19:57 crc kubenswrapper[4956]: I1211 22:19:57.174699 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"4de42e9b-773b-4150-809a-c9255878e80c","Type":"ContainerStarted","Data":"b3d56069a81c7303d3c714f74345aed46e0540cc04146a4080a03f3afb86a157"} Dec 11 22:19:57 crc kubenswrapper[4956]: I1211 22:19:57.174734 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"4de42e9b-773b-4150-809a-c9255878e80c","Type":"ContainerStarted","Data":"524c9da2e5f51bfe539dd3d824ee6ae20be98f8645a90c57b897703dd2cee999"} Dec 11 22:19:57 crc kubenswrapper[4956]: I1211 22:19:57.174746 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"4de42e9b-773b-4150-809a-c9255878e80c","Type":"ContainerStarted","Data":"c579f5c9d42428c66b782cbc00f985d8641790e13c193924a986e07545c20faa"} Dec 11 22:19:57 crc kubenswrapper[4956]: I1211 22:19:57.174761 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" 
event={"ID":"4de42e9b-773b-4150-809a-c9255878e80c","Type":"ContainerStarted","Data":"c398a5d1aabb665cb1996926b45a8e4290dd23ea5566fa783ea78686dd79cd1f"} Dec 11 22:19:57 crc kubenswrapper[4956]: I1211 22:19:57.174795 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"4de42e9b-773b-4150-809a-c9255878e80c","Type":"ContainerStarted","Data":"64b4181f31a75542d9377c9408c86caf23bc3141a6abfea168d66567c45dfa9a"} Dec 11 22:19:57 crc kubenswrapper[4956]: I1211 22:19:57.174805 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"4de42e9b-773b-4150-809a-c9255878e80c","Type":"ContainerStarted","Data":"b83c50645bf80e56443b00168d768b7781d40e628f288aff4cf6955b58dcbac6"} Dec 11 22:19:58 crc kubenswrapper[4956]: I1211 22:19:58.184345 4956 generic.go:334] "Generic (PLEG): container finished" podID="6033e1f1-b8a5-446c-829f-4505a5db344c" containerID="41576cf4a6f98e8ba14792e23529c38c86fcefca9ac05880b587963b1ed309d3" exitCode=0 Dec 11 22:19:58 crc kubenswrapper[4956]: I1211 22:19:58.184448 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q85sv" event={"ID":"6033e1f1-b8a5-446c-829f-4505a5db344c","Type":"ContainerDied","Data":"41576cf4a6f98e8ba14792e23529c38c86fcefca9ac05880b587963b1ed309d3"} Dec 11 22:19:58 crc kubenswrapper[4956]: I1211 22:19:58.191106 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"4de42e9b-773b-4150-809a-c9255878e80c","Type":"ContainerStarted","Data":"fe720e168ded07403a1c0663da7fe87e53ca42599fb15250f06be89401c99fd8"} Dec 11 22:19:58 crc kubenswrapper[4956]: I1211 22:19:58.191152 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"4de42e9b-773b-4150-809a-c9255878e80c","Type":"ContainerStarted","Data":"178967d2e3e485176061f31246bca775b9d6553796648a4c73af2203bb8540a1"} Dec 11 22:19:58 crc kubenswrapper[4956]: I1211 22:19:58.191173 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"4de42e9b-773b-4150-809a-c9255878e80c","Type":"ContainerStarted","Data":"82877e0bbdb868e61f83360afc2f665245cf8525b7583805453309bde707a613"} Dec 11 22:19:58 crc kubenswrapper[4956]: I1211 22:19:58.191183 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"4de42e9b-773b-4150-809a-c9255878e80c","Type":"ContainerStarted","Data":"daf1facd112956b7ab71887de1b4ce5937e89b5b7f0f55bf419695abff66cbc2"} Dec 11 22:19:58 crc kubenswrapper[4956]: I1211 22:19:58.191193 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"4de42e9b-773b-4150-809a-c9255878e80c","Type":"ContainerStarted","Data":"a94c0b63caec934827802999a46937152372dd017d46defe87bafa253f0ae1da"} Dec 11 22:19:59 crc kubenswrapper[4956]: I1211 22:19:59.234535 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q85sv" event={"ID":"6033e1f1-b8a5-446c-829f-4505a5db344c","Type":"ContainerStarted","Data":"2936bd137b05521adbf8e4b12e55f5dfb40c485dce8720bf104b5401e716c987"} Dec 11 22:19:59 crc kubenswrapper[4956]: I1211 22:19:59.254264 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"4de42e9b-773b-4150-809a-c9255878e80c","Type":"ContainerStarted","Data":"49a20077660be4b19ac249daab59b4d5e8cb5042cc72dbc7db1760889ba72bd5"} Dec 11 22:19:59 crc 
kubenswrapper[4956]: I1211 22:19:59.254676 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"4de42e9b-773b-4150-809a-c9255878e80c","Type":"ContainerStarted","Data":"a4a1515d557637bd90fa60c75dc7dfa548696e0465247bf35b0b0102302a0c3e"} Dec 11 22:19:59 crc kubenswrapper[4956]: I1211 22:19:59.254692 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"4de42e9b-773b-4150-809a-c9255878e80c","Type":"ContainerStarted","Data":"307e22c55feaf2ef9e7cafe7ddd4f0591e49573edc37cc8877dbfaa93258b313"} Dec 11 22:19:59 crc kubenswrapper[4956]: I1211 22:19:59.254705 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"4de42e9b-773b-4150-809a-c9255878e80c","Type":"ContainerStarted","Data":"61392bbdb48bf8e30ad6272285558dd3dce4428c4da7df6e727ad068e6b00b92"} Dec 11 22:19:59 crc kubenswrapper[4956]: I1211 22:19:59.254717 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"4de42e9b-773b-4150-809a-c9255878e80c","Type":"ContainerStarted","Data":"181a32eace35fdaa528ef810fe09c2ddeb7e2eaeb0d26f619e72c0c4d8e59362"} Dec 11 22:19:59 crc kubenswrapper[4956]: I1211 22:19:59.263829 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-q85sv" podStartSLOduration=1.588765926 podStartE2EDuration="4.263814203s" podCreationTimestamp="2025-12-11 22:19:55 +0000 UTC" firstStartedPulling="2025-12-11 22:19:56.158942673 +0000 UTC m=+1888.603320823" lastFinishedPulling="2025-12-11 22:19:58.83399095 +0000 UTC m=+1891.278369100" observedRunningTime="2025-12-11 22:19:59.259591969 +0000 UTC m=+1891.703970129" watchObservedRunningTime="2025-12-11 22:19:59.263814203 +0000 UTC m=+1891.708192353" Dec 11 22:19:59 crc kubenswrapper[4956]: I1211 22:19:59.298279 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-storage-0" podStartSLOduration=4.298257081 podStartE2EDuration="4.298257081s" podCreationTimestamp="2025-12-11 22:19:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 22:19:59.289622509 +0000 UTC m=+1891.734000669" watchObservedRunningTime="2025-12-11 22:19:59.298257081 +0000 UTC m=+1891.742635231" Dec 11 22:20:05 crc kubenswrapper[4956]: I1211 22:20:05.401032 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-q85sv" Dec 11 22:20:05 crc kubenswrapper[4956]: I1211 22:20:05.401343 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-q85sv" Dec 11 22:20:05 crc kubenswrapper[4956]: I1211 22:20:05.459432 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-q85sv" Dec 11 22:20:05 crc kubenswrapper[4956]: I1211 22:20:05.477079 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tkclp"] Dec 11 22:20:05 crc kubenswrapper[4956]: I1211 22:20:05.480116 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tkclp" Dec 11 22:20:05 crc kubenswrapper[4956]: I1211 22:20:05.492493 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tkclp"] Dec 11 22:20:05 crc kubenswrapper[4956]: I1211 22:20:05.586657 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w69qk\" (UniqueName: \"kubernetes.io/projected/9b65dff7-bef2-457b-b521-43d59157463a-kube-api-access-w69qk\") pod \"redhat-marketplace-tkclp\" (UID: \"9b65dff7-bef2-457b-b521-43d59157463a\") " pod="openshift-marketplace/redhat-marketplace-tkclp" Dec 11 22:20:05 crc kubenswrapper[4956]: I1211 22:20:05.586748 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b65dff7-bef2-457b-b521-43d59157463a-utilities\") pod \"redhat-marketplace-tkclp\" (UID: \"9b65dff7-bef2-457b-b521-43d59157463a\") " pod="openshift-marketplace/redhat-marketplace-tkclp" Dec 11 22:20:05 crc kubenswrapper[4956]: I1211 22:20:05.586856 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b65dff7-bef2-457b-b521-43d59157463a-catalog-content\") pod \"redhat-marketplace-tkclp\" (UID: \"9b65dff7-bef2-457b-b521-43d59157463a\") " pod="openshift-marketplace/redhat-marketplace-tkclp" Dec 11 22:20:05 crc kubenswrapper[4956]: I1211 22:20:05.688403 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b65dff7-bef2-457b-b521-43d59157463a-utilities\") pod \"redhat-marketplace-tkclp\" (UID: \"9b65dff7-bef2-457b-b521-43d59157463a\") " pod="openshift-marketplace/redhat-marketplace-tkclp" Dec 11 22:20:05 crc kubenswrapper[4956]: I1211 22:20:05.688641 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b65dff7-bef2-457b-b521-43d59157463a-catalog-content\") pod \"redhat-marketplace-tkclp\" (UID: \"9b65dff7-bef2-457b-b521-43d59157463a\") " pod="openshift-marketplace/redhat-marketplace-tkclp" Dec 11 22:20:05 crc kubenswrapper[4956]: I1211 22:20:05.689153 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b65dff7-bef2-457b-b521-43d59157463a-utilities\") pod \"redhat-marketplace-tkclp\" (UID: \"9b65dff7-bef2-457b-b521-43d59157463a\") " pod="openshift-marketplace/redhat-marketplace-tkclp" Dec 11 22:20:05 crc kubenswrapper[4956]: I1211 22:20:05.689378 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b65dff7-bef2-457b-b521-43d59157463a-catalog-content\") pod \"redhat-marketplace-tkclp\" (UID: \"9b65dff7-bef2-457b-b521-43d59157463a\") " pod="openshift-marketplace/redhat-marketplace-tkclp" Dec 11 22:20:05 crc kubenswrapper[4956]: I1211 22:20:05.689491 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w69qk\" (UniqueName: \"kubernetes.io/projected/9b65dff7-bef2-457b-b521-43d59157463a-kube-api-access-w69qk\") pod \"redhat-marketplace-tkclp\" (UID: \"9b65dff7-bef2-457b-b521-43d59157463a\") " pod="openshift-marketplace/redhat-marketplace-tkclp" Dec 11 22:20:05 crc kubenswrapper[4956]: I1211 22:20:05.711440 4956 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-w69qk\" (UniqueName: \"kubernetes.io/projected/9b65dff7-bef2-457b-b521-43d59157463a-kube-api-access-w69qk\") pod \"redhat-marketplace-tkclp\" (UID: \"9b65dff7-bef2-457b-b521-43d59157463a\") " pod="openshift-marketplace/redhat-marketplace-tkclp" Dec 11 22:20:05 crc kubenswrapper[4956]: I1211 22:20:05.814070 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tkclp" Dec 11 22:20:06 crc kubenswrapper[4956]: I1211 22:20:06.024525 4956 scope.go:117] "RemoveContainer" containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" Dec 11 22:20:06 crc kubenswrapper[4956]: E1211 22:20:06.024852 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:20:06 crc kubenswrapper[4956]: I1211 22:20:06.241632 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tkclp"] Dec 11 22:20:06 crc kubenswrapper[4956]: I1211 22:20:06.321746 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkclp" event={"ID":"9b65dff7-bef2-457b-b521-43d59157463a","Type":"ContainerStarted","Data":"184781d2200af96e7fa605f09b36b0bc5e5b5f97fda99b70fa726270e69da0dd"} Dec 11 22:20:06 crc kubenswrapper[4956]: I1211 22:20:06.361788 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-q85sv" Dec 11 22:20:07 crc kubenswrapper[4956]: I1211 22:20:07.330741 4956 generic.go:334] "Generic (PLEG): container finished" podID="9b65dff7-bef2-457b-b521-43d59157463a" containerID="3768e335ff4b0e3b7de1d4b3347bdb8625640e86ca1a32991c0959d90fb8374e" exitCode=0 Dec 11 22:20:07 crc kubenswrapper[4956]: I1211 22:20:07.330825 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkclp" event={"ID":"9b65dff7-bef2-457b-b521-43d59157463a","Type":"ContainerDied","Data":"3768e335ff4b0e3b7de1d4b3347bdb8625640e86ca1a32991c0959d90fb8374e"} Dec 11 22:20:08 crc kubenswrapper[4956]: I1211 22:20:08.860883 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-q85sv"] Dec 11 22:20:08 crc kubenswrapper[4956]: I1211 22:20:08.861641 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-q85sv" podUID="6033e1f1-b8a5-446c-829f-4505a5db344c" containerName="registry-server" containerID="cri-o://2936bd137b05521adbf8e4b12e55f5dfb40c485dce8720bf104b5401e716c987" gracePeriod=2 Dec 11 22:20:09 crc kubenswrapper[4956]: I1211 22:20:09.348501 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkclp" event={"ID":"9b65dff7-bef2-457b-b521-43d59157463a","Type":"ContainerStarted","Data":"492564f0d72e4b5c6cf265a7d3a63c6bc63232d43360996d08737baf28f16b56"} Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.288394 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-q85sv" Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.358149 4956 generic.go:334] "Generic (PLEG): container finished" podID="6033e1f1-b8a5-446c-829f-4505a5db344c" containerID="2936bd137b05521adbf8e4b12e55f5dfb40c485dce8720bf104b5401e716c987" exitCode=0 Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.358194 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-q85sv" Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.358228 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q85sv" event={"ID":"6033e1f1-b8a5-446c-829f-4505a5db344c","Type":"ContainerDied","Data":"2936bd137b05521adbf8e4b12e55f5dfb40c485dce8720bf104b5401e716c987"} Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.358311 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q85sv" event={"ID":"6033e1f1-b8a5-446c-829f-4505a5db344c","Type":"ContainerDied","Data":"4532d18d12a365cfaa678d25ba2374998b5934586f1cee6c2733d96f04bd09c3"} Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.358337 4956 scope.go:117] "RemoveContainer" containerID="2936bd137b05521adbf8e4b12e55f5dfb40c485dce8720bf104b5401e716c987" Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.360274 4956 generic.go:334] "Generic (PLEG): container finished" podID="9b65dff7-bef2-457b-b521-43d59157463a" containerID="492564f0d72e4b5c6cf265a7d3a63c6bc63232d43360996d08737baf28f16b56" exitCode=0 Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.360308 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkclp" event={"ID":"9b65dff7-bef2-457b-b521-43d59157463a","Type":"ContainerDied","Data":"492564f0d72e4b5c6cf265a7d3a63c6bc63232d43360996d08737baf28f16b56"} Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.367333 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6033e1f1-b8a5-446c-829f-4505a5db344c-catalog-content\") pod \"6033e1f1-b8a5-446c-829f-4505a5db344c\" (UID: \"6033e1f1-b8a5-446c-829f-4505a5db344c\") " Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.367446 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6033e1f1-b8a5-446c-829f-4505a5db344c-utilities\") pod \"6033e1f1-b8a5-446c-829f-4505a5db344c\" (UID: \"6033e1f1-b8a5-446c-829f-4505a5db344c\") " Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.367596 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5p9qd\" (UniqueName: \"kubernetes.io/projected/6033e1f1-b8a5-446c-829f-4505a5db344c-kube-api-access-5p9qd\") pod \"6033e1f1-b8a5-446c-829f-4505a5db344c\" (UID: \"6033e1f1-b8a5-446c-829f-4505a5db344c\") " Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.368266 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6033e1f1-b8a5-446c-829f-4505a5db344c-utilities" (OuterVolumeSpecName: "utilities") pod "6033e1f1-b8a5-446c-829f-4505a5db344c" (UID: "6033e1f1-b8a5-446c-829f-4505a5db344c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.374024 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6033e1f1-b8a5-446c-829f-4505a5db344c-kube-api-access-5p9qd" (OuterVolumeSpecName: "kube-api-access-5p9qd") pod "6033e1f1-b8a5-446c-829f-4505a5db344c" (UID: "6033e1f1-b8a5-446c-829f-4505a5db344c"). InnerVolumeSpecName "kube-api-access-5p9qd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.387973 4956 scope.go:117] "RemoveContainer" containerID="41576cf4a6f98e8ba14792e23529c38c86fcefca9ac05880b587963b1ed309d3" Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.412396 4956 scope.go:117] "RemoveContainer" containerID="f4793b8639408db01588a830227c9a62357263bb2cd02f93014365ece4b1a3f4" Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.437966 4956 scope.go:117] "RemoveContainer" containerID="2936bd137b05521adbf8e4b12e55f5dfb40c485dce8720bf104b5401e716c987" Dec 11 22:20:10 crc kubenswrapper[4956]: E1211 22:20:10.438356 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2936bd137b05521adbf8e4b12e55f5dfb40c485dce8720bf104b5401e716c987\": container with ID starting with 2936bd137b05521adbf8e4b12e55f5dfb40c485dce8720bf104b5401e716c987 not found: ID does not exist" containerID="2936bd137b05521adbf8e4b12e55f5dfb40c485dce8720bf104b5401e716c987" Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.438396 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2936bd137b05521adbf8e4b12e55f5dfb40c485dce8720bf104b5401e716c987"} err="failed to get container status \"2936bd137b05521adbf8e4b12e55f5dfb40c485dce8720bf104b5401e716c987\": rpc error: code = NotFound desc = could not find container \"2936bd137b05521adbf8e4b12e55f5dfb40c485dce8720bf104b5401e716c987\": container with ID starting with 2936bd137b05521adbf8e4b12e55f5dfb40c485dce8720bf104b5401e716c987 not found: ID does not exist" Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.438418 4956 scope.go:117] "RemoveContainer" containerID="41576cf4a6f98e8ba14792e23529c38c86fcefca9ac05880b587963b1ed309d3" Dec 11 22:20:10 crc kubenswrapper[4956]: E1211 22:20:10.438661 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"41576cf4a6f98e8ba14792e23529c38c86fcefca9ac05880b587963b1ed309d3\": container with ID starting with 41576cf4a6f98e8ba14792e23529c38c86fcefca9ac05880b587963b1ed309d3 not found: ID does not exist" containerID="41576cf4a6f98e8ba14792e23529c38c86fcefca9ac05880b587963b1ed309d3" Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.438692 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41576cf4a6f98e8ba14792e23529c38c86fcefca9ac05880b587963b1ed309d3"} err="failed to get container status \"41576cf4a6f98e8ba14792e23529c38c86fcefca9ac05880b587963b1ed309d3\": rpc error: code = NotFound desc = could not find container \"41576cf4a6f98e8ba14792e23529c38c86fcefca9ac05880b587963b1ed309d3\": container with ID starting with 41576cf4a6f98e8ba14792e23529c38c86fcefca9ac05880b587963b1ed309d3 not found: ID does not exist" Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.438710 4956 scope.go:117] "RemoveContainer" containerID="f4793b8639408db01588a830227c9a62357263bb2cd02f93014365ece4b1a3f4" Dec 11 22:20:10 crc 
kubenswrapper[4956]: E1211 22:20:10.439116 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4793b8639408db01588a830227c9a62357263bb2cd02f93014365ece4b1a3f4\": container with ID starting with f4793b8639408db01588a830227c9a62357263bb2cd02f93014365ece4b1a3f4 not found: ID does not exist" containerID="f4793b8639408db01588a830227c9a62357263bb2cd02f93014365ece4b1a3f4" Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.439138 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4793b8639408db01588a830227c9a62357263bb2cd02f93014365ece4b1a3f4"} err="failed to get container status \"f4793b8639408db01588a830227c9a62357263bb2cd02f93014365ece4b1a3f4\": rpc error: code = NotFound desc = could not find container \"f4793b8639408db01588a830227c9a62357263bb2cd02f93014365ece4b1a3f4\": container with ID starting with f4793b8639408db01588a830227c9a62357263bb2cd02f93014365ece4b1a3f4 not found: ID does not exist" Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.469512 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5p9qd\" (UniqueName: \"kubernetes.io/projected/6033e1f1-b8a5-446c-829f-4505a5db344c-kube-api-access-5p9qd\") on node \"crc\" DevicePath \"\"" Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.469551 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6033e1f1-b8a5-446c-829f-4505a5db344c-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.497500 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6033e1f1-b8a5-446c-829f-4505a5db344c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6033e1f1-b8a5-446c-829f-4505a5db344c" (UID: "6033e1f1-b8a5-446c-829f-4505a5db344c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.571097 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6033e1f1-b8a5-446c-829f-4505a5db344c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.706388 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-q85sv"] Dec 11 22:20:10 crc kubenswrapper[4956]: I1211 22:20:10.712438 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-q85sv"] Dec 11 22:20:11 crc kubenswrapper[4956]: I1211 22:20:11.372000 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkclp" event={"ID":"9b65dff7-bef2-457b-b521-43d59157463a","Type":"ContainerStarted","Data":"d1b08b6838457086a895eafd79b48b1277e577dac48f061bfb6e792459e53318"} Dec 11 22:20:11 crc kubenswrapper[4956]: I1211 22:20:11.388449 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tkclp" podStartSLOduration=2.660802863 podStartE2EDuration="6.388431515s" podCreationTimestamp="2025-12-11 22:20:05 +0000 UTC" firstStartedPulling="2025-12-11 22:20:07.332971699 +0000 UTC m=+1899.777349849" lastFinishedPulling="2025-12-11 22:20:11.060600341 +0000 UTC m=+1903.504978501" observedRunningTime="2025-12-11 22:20:11.387169561 +0000 UTC m=+1903.831547731" watchObservedRunningTime="2025-12-11 22:20:11.388431515 +0000 UTC m=+1903.832809665" Dec 11 22:20:12 crc kubenswrapper[4956]: I1211 22:20:12.032739 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6033e1f1-b8a5-446c-829f-4505a5db344c" path="/var/lib/kubelet/pods/6033e1f1-b8a5-446c-829f-4505a5db344c/volumes" Dec 11 22:20:15 crc kubenswrapper[4956]: I1211 22:20:15.814666 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tkclp" Dec 11 22:20:15 crc kubenswrapper[4956]: I1211 22:20:15.815068 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tkclp" Dec 11 22:20:15 crc kubenswrapper[4956]: I1211 22:20:15.892281 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tkclp" Dec 11 22:20:16 crc kubenswrapper[4956]: I1211 22:20:16.457843 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tkclp" Dec 11 22:20:16 crc kubenswrapper[4956]: I1211 22:20:16.503694 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tkclp"] Dec 11 22:20:18 crc kubenswrapper[4956]: I1211 22:20:18.439805 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tkclp" podUID="9b65dff7-bef2-457b-b521-43d59157463a" containerName="registry-server" containerID="cri-o://d1b08b6838457086a895eafd79b48b1277e577dac48f061bfb6e792459e53318" gracePeriod=2 Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.021343 4956 scope.go:117] "RemoveContainer" containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" Dec 11 22:20:19 crc kubenswrapper[4956]: E1211 22:20:19.022123 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.371213 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tkclp" Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.415470 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w69qk\" (UniqueName: \"kubernetes.io/projected/9b65dff7-bef2-457b-b521-43d59157463a-kube-api-access-w69qk\") pod \"9b65dff7-bef2-457b-b521-43d59157463a\" (UID: \"9b65dff7-bef2-457b-b521-43d59157463a\") " Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.415526 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b65dff7-bef2-457b-b521-43d59157463a-utilities\") pod \"9b65dff7-bef2-457b-b521-43d59157463a\" (UID: \"9b65dff7-bef2-457b-b521-43d59157463a\") " Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.415700 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b65dff7-bef2-457b-b521-43d59157463a-catalog-content\") pod \"9b65dff7-bef2-457b-b521-43d59157463a\" (UID: \"9b65dff7-bef2-457b-b521-43d59157463a\") " Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.417121 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b65dff7-bef2-457b-b521-43d59157463a-utilities" (OuterVolumeSpecName: "utilities") pod "9b65dff7-bef2-457b-b521-43d59157463a" (UID: "9b65dff7-bef2-457b-b521-43d59157463a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.423024 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b65dff7-bef2-457b-b521-43d59157463a-kube-api-access-w69qk" (OuterVolumeSpecName: "kube-api-access-w69qk") pod "9b65dff7-bef2-457b-b521-43d59157463a" (UID: "9b65dff7-bef2-457b-b521-43d59157463a"). InnerVolumeSpecName "kube-api-access-w69qk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.437826 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b65dff7-bef2-457b-b521-43d59157463a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9b65dff7-bef2-457b-b521-43d59157463a" (UID: "9b65dff7-bef2-457b-b521-43d59157463a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.449463 4956 generic.go:334] "Generic (PLEG): container finished" podID="9b65dff7-bef2-457b-b521-43d59157463a" containerID="d1b08b6838457086a895eafd79b48b1277e577dac48f061bfb6e792459e53318" exitCode=0 Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.449527 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tkclp" Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.449528 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkclp" event={"ID":"9b65dff7-bef2-457b-b521-43d59157463a","Type":"ContainerDied","Data":"d1b08b6838457086a895eafd79b48b1277e577dac48f061bfb6e792459e53318"} Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.449699 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkclp" event={"ID":"9b65dff7-bef2-457b-b521-43d59157463a","Type":"ContainerDied","Data":"184781d2200af96e7fa605f09b36b0bc5e5b5f97fda99b70fa726270e69da0dd"} Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.449729 4956 scope.go:117] "RemoveContainer" containerID="d1b08b6838457086a895eafd79b48b1277e577dac48f061bfb6e792459e53318" Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.469832 4956 scope.go:117] "RemoveContainer" containerID="492564f0d72e4b5c6cf265a7d3a63c6bc63232d43360996d08737baf28f16b56" Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.491425 4956 scope.go:117] "RemoveContainer" containerID="3768e335ff4b0e3b7de1d4b3347bdb8625640e86ca1a32991c0959d90fb8374e" Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.494865 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tkclp"] Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.501833 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tkclp"] Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.517491 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w69qk\" (UniqueName: \"kubernetes.io/projected/9b65dff7-bef2-457b-b521-43d59157463a-kube-api-access-w69qk\") on node \"crc\" DevicePath \"\"" Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.517545 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b65dff7-bef2-457b-b521-43d59157463a-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.517568 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b65dff7-bef2-457b-b521-43d59157463a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.533119 4956 scope.go:117] "RemoveContainer" containerID="d1b08b6838457086a895eafd79b48b1277e577dac48f061bfb6e792459e53318" Dec 11 22:20:19 crc kubenswrapper[4956]: E1211 22:20:19.533532 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1b08b6838457086a895eafd79b48b1277e577dac48f061bfb6e792459e53318\": container with ID starting with d1b08b6838457086a895eafd79b48b1277e577dac48f061bfb6e792459e53318 not found: ID does not exist" containerID="d1b08b6838457086a895eafd79b48b1277e577dac48f061bfb6e792459e53318" Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.533571 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1b08b6838457086a895eafd79b48b1277e577dac48f061bfb6e792459e53318"} err="failed to get container status \"d1b08b6838457086a895eafd79b48b1277e577dac48f061bfb6e792459e53318\": rpc error: code = NotFound desc = could not find container \"d1b08b6838457086a895eafd79b48b1277e577dac48f061bfb6e792459e53318\": container with ID starting with 
d1b08b6838457086a895eafd79b48b1277e577dac48f061bfb6e792459e53318 not found: ID does not exist" Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.533599 4956 scope.go:117] "RemoveContainer" containerID="492564f0d72e4b5c6cf265a7d3a63c6bc63232d43360996d08737baf28f16b56" Dec 11 22:20:19 crc kubenswrapper[4956]: E1211 22:20:19.533957 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"492564f0d72e4b5c6cf265a7d3a63c6bc63232d43360996d08737baf28f16b56\": container with ID starting with 492564f0d72e4b5c6cf265a7d3a63c6bc63232d43360996d08737baf28f16b56 not found: ID does not exist" containerID="492564f0d72e4b5c6cf265a7d3a63c6bc63232d43360996d08737baf28f16b56" Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.534013 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"492564f0d72e4b5c6cf265a7d3a63c6bc63232d43360996d08737baf28f16b56"} err="failed to get container status \"492564f0d72e4b5c6cf265a7d3a63c6bc63232d43360996d08737baf28f16b56\": rpc error: code = NotFound desc = could not find container \"492564f0d72e4b5c6cf265a7d3a63c6bc63232d43360996d08737baf28f16b56\": container with ID starting with 492564f0d72e4b5c6cf265a7d3a63c6bc63232d43360996d08737baf28f16b56 not found: ID does not exist" Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.534052 4956 scope.go:117] "RemoveContainer" containerID="3768e335ff4b0e3b7de1d4b3347bdb8625640e86ca1a32991c0959d90fb8374e" Dec 11 22:20:19 crc kubenswrapper[4956]: E1211 22:20:19.534374 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3768e335ff4b0e3b7de1d4b3347bdb8625640e86ca1a32991c0959d90fb8374e\": container with ID starting with 3768e335ff4b0e3b7de1d4b3347bdb8625640e86ca1a32991c0959d90fb8374e not found: ID does not exist" containerID="3768e335ff4b0e3b7de1d4b3347bdb8625640e86ca1a32991c0959d90fb8374e" Dec 11 22:20:19 crc kubenswrapper[4956]: I1211 22:20:19.534408 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3768e335ff4b0e3b7de1d4b3347bdb8625640e86ca1a32991c0959d90fb8374e"} err="failed to get container status \"3768e335ff4b0e3b7de1d4b3347bdb8625640e86ca1a32991c0959d90fb8374e\": rpc error: code = NotFound desc = could not find container \"3768e335ff4b0e3b7de1d4b3347bdb8625640e86ca1a32991c0959d90fb8374e\": container with ID starting with 3768e335ff4b0e3b7de1d4b3347bdb8625640e86ca1a32991c0959d90fb8374e not found: ID does not exist" Dec 11 22:20:20 crc kubenswrapper[4956]: I1211 22:20:20.046180 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b65dff7-bef2-457b-b521-43d59157463a" path="/var/lib/kubelet/pods/9b65dff7-bef2-457b-b521-43d59157463a/volumes" Dec 11 22:20:31 crc kubenswrapper[4956]: I1211 22:20:31.021692 4956 scope.go:117] "RemoveContainer" containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" Dec 11 22:20:31 crc kubenswrapper[4956]: E1211 22:20:31.022721 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:20:42 crc kubenswrapper[4956]: I1211 22:20:42.022222 
4956 scope.go:117] "RemoveContainer" containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" Dec 11 22:20:42 crc kubenswrapper[4956]: E1211 22:20:42.023315 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:20:53 crc kubenswrapper[4956]: I1211 22:20:53.021551 4956 scope.go:117] "RemoveContainer" containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" Dec 11 22:20:53 crc kubenswrapper[4956]: E1211 22:20:53.022593 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:21:08 crc kubenswrapper[4956]: I1211 22:21:08.027077 4956 scope.go:117] "RemoveContainer" containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" Dec 11 22:21:08 crc kubenswrapper[4956]: E1211 22:21:08.027911 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:21:23 crc kubenswrapper[4956]: I1211 22:21:23.021887 4956 scope.go:117] "RemoveContainer" containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" Dec 11 22:21:23 crc kubenswrapper[4956]: E1211 22:21:23.022807 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:21:37 crc kubenswrapper[4956]: I1211 22:21:37.021259 4956 scope.go:117] "RemoveContainer" containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" Dec 11 22:21:37 crc kubenswrapper[4956]: E1211 22:21:37.022139 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:21:52 crc kubenswrapper[4956]: I1211 22:21:52.021601 4956 scope.go:117] "RemoveContainer" containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" Dec 11 22:21:52 crc kubenswrapper[4956]: I1211 22:21:52.304204 4956 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" event={"ID":"cf61c63b-b06c-4f51-add2-aefe57de751a","Type":"ContainerStarted","Data":"31a2b79ec593947d626df8fd6abfea728f54de214ebbeb9d2857540713c4aad5"} Dec 11 22:23:25 crc kubenswrapper[4956]: I1211 22:23:25.723235 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4tz7r"] Dec 11 22:23:25 crc kubenswrapper[4956]: E1211 22:23:25.724176 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6033e1f1-b8a5-446c-829f-4505a5db344c" containerName="registry-server" Dec 11 22:23:25 crc kubenswrapper[4956]: I1211 22:23:25.724191 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="6033e1f1-b8a5-446c-829f-4505a5db344c" containerName="registry-server" Dec 11 22:23:25 crc kubenswrapper[4956]: E1211 22:23:25.724212 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6033e1f1-b8a5-446c-829f-4505a5db344c" containerName="extract-utilities" Dec 11 22:23:25 crc kubenswrapper[4956]: I1211 22:23:25.724220 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="6033e1f1-b8a5-446c-829f-4505a5db344c" containerName="extract-utilities" Dec 11 22:23:25 crc kubenswrapper[4956]: E1211 22:23:25.724241 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b65dff7-bef2-457b-b521-43d59157463a" containerName="registry-server" Dec 11 22:23:25 crc kubenswrapper[4956]: I1211 22:23:25.724248 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b65dff7-bef2-457b-b521-43d59157463a" containerName="registry-server" Dec 11 22:23:25 crc kubenswrapper[4956]: E1211 22:23:25.724258 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6033e1f1-b8a5-446c-829f-4505a5db344c" containerName="extract-content" Dec 11 22:23:25 crc kubenswrapper[4956]: I1211 22:23:25.724263 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="6033e1f1-b8a5-446c-829f-4505a5db344c" containerName="extract-content" Dec 11 22:23:25 crc kubenswrapper[4956]: E1211 22:23:25.724284 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b65dff7-bef2-457b-b521-43d59157463a" containerName="extract-content" Dec 11 22:23:25 crc kubenswrapper[4956]: I1211 22:23:25.724290 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b65dff7-bef2-457b-b521-43d59157463a" containerName="extract-content" Dec 11 22:23:25 crc kubenswrapper[4956]: E1211 22:23:25.724302 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b65dff7-bef2-457b-b521-43d59157463a" containerName="extract-utilities" Dec 11 22:23:25 crc kubenswrapper[4956]: I1211 22:23:25.724309 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b65dff7-bef2-457b-b521-43d59157463a" containerName="extract-utilities" Dec 11 22:23:25 crc kubenswrapper[4956]: I1211 22:23:25.724456 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b65dff7-bef2-457b-b521-43d59157463a" containerName="registry-server" Dec 11 22:23:25 crc kubenswrapper[4956]: I1211 22:23:25.724475 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="6033e1f1-b8a5-446c-829f-4505a5db344c" containerName="registry-server" Dec 11 22:23:25 crc kubenswrapper[4956]: I1211 22:23:25.725435 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4tz7r" Dec 11 22:23:25 crc kubenswrapper[4956]: I1211 22:23:25.743470 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4tz7r"] Dec 11 22:23:25 crc kubenswrapper[4956]: I1211 22:23:25.874124 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9j44b\" (UniqueName: \"kubernetes.io/projected/f4af5057-f6ec-4292-a6c8-709b61140ae4-kube-api-access-9j44b\") pod \"community-operators-4tz7r\" (UID: \"f4af5057-f6ec-4292-a6c8-709b61140ae4\") " pod="openshift-marketplace/community-operators-4tz7r" Dec 11 22:23:25 crc kubenswrapper[4956]: I1211 22:23:25.874179 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4af5057-f6ec-4292-a6c8-709b61140ae4-catalog-content\") pod \"community-operators-4tz7r\" (UID: \"f4af5057-f6ec-4292-a6c8-709b61140ae4\") " pod="openshift-marketplace/community-operators-4tz7r" Dec 11 22:23:25 crc kubenswrapper[4956]: I1211 22:23:25.874253 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4af5057-f6ec-4292-a6c8-709b61140ae4-utilities\") pod \"community-operators-4tz7r\" (UID: \"f4af5057-f6ec-4292-a6c8-709b61140ae4\") " pod="openshift-marketplace/community-operators-4tz7r" Dec 11 22:23:25 crc kubenswrapper[4956]: I1211 22:23:25.975538 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4af5057-f6ec-4292-a6c8-709b61140ae4-utilities\") pod \"community-operators-4tz7r\" (UID: \"f4af5057-f6ec-4292-a6c8-709b61140ae4\") " pod="openshift-marketplace/community-operators-4tz7r" Dec 11 22:23:25 crc kubenswrapper[4956]: I1211 22:23:25.975666 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9j44b\" (UniqueName: \"kubernetes.io/projected/f4af5057-f6ec-4292-a6c8-709b61140ae4-kube-api-access-9j44b\") pod \"community-operators-4tz7r\" (UID: \"f4af5057-f6ec-4292-a6c8-709b61140ae4\") " pod="openshift-marketplace/community-operators-4tz7r" Dec 11 22:23:25 crc kubenswrapper[4956]: I1211 22:23:25.975694 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4af5057-f6ec-4292-a6c8-709b61140ae4-catalog-content\") pod \"community-operators-4tz7r\" (UID: \"f4af5057-f6ec-4292-a6c8-709b61140ae4\") " pod="openshift-marketplace/community-operators-4tz7r" Dec 11 22:23:25 crc kubenswrapper[4956]: I1211 22:23:25.976202 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4af5057-f6ec-4292-a6c8-709b61140ae4-utilities\") pod \"community-operators-4tz7r\" (UID: \"f4af5057-f6ec-4292-a6c8-709b61140ae4\") " pod="openshift-marketplace/community-operators-4tz7r" Dec 11 22:23:25 crc kubenswrapper[4956]: I1211 22:23:25.976260 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4af5057-f6ec-4292-a6c8-709b61140ae4-catalog-content\") pod \"community-operators-4tz7r\" (UID: \"f4af5057-f6ec-4292-a6c8-709b61140ae4\") " pod="openshift-marketplace/community-operators-4tz7r" Dec 11 22:23:25 crc kubenswrapper[4956]: I1211 22:23:25.996877 4956 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-9j44b\" (UniqueName: \"kubernetes.io/projected/f4af5057-f6ec-4292-a6c8-709b61140ae4-kube-api-access-9j44b\") pod \"community-operators-4tz7r\" (UID: \"f4af5057-f6ec-4292-a6c8-709b61140ae4\") " pod="openshift-marketplace/community-operators-4tz7r" Dec 11 22:23:26 crc kubenswrapper[4956]: I1211 22:23:26.046412 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4tz7r" Dec 11 22:23:26 crc kubenswrapper[4956]: I1211 22:23:26.591281 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4tz7r"] Dec 11 22:23:27 crc kubenswrapper[4956]: I1211 22:23:27.129976 4956 generic.go:334] "Generic (PLEG): container finished" podID="f4af5057-f6ec-4292-a6c8-709b61140ae4" containerID="e9bde9a4acc5f10db7d00122dce559c77d6a48e21be30c6f7dc72778c50a2ef0" exitCode=0 Dec 11 22:23:27 crc kubenswrapper[4956]: I1211 22:23:27.130229 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tz7r" event={"ID":"f4af5057-f6ec-4292-a6c8-709b61140ae4","Type":"ContainerDied","Data":"e9bde9a4acc5f10db7d00122dce559c77d6a48e21be30c6f7dc72778c50a2ef0"} Dec 11 22:23:27 crc kubenswrapper[4956]: I1211 22:23:27.130477 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tz7r" event={"ID":"f4af5057-f6ec-4292-a6c8-709b61140ae4","Type":"ContainerStarted","Data":"2aa844f58c94ddf17e693b6f36652f776297f9b5a3d1a53121fa26071796a9ac"} Dec 11 22:23:29 crc kubenswrapper[4956]: I1211 22:23:29.149330 4956 generic.go:334] "Generic (PLEG): container finished" podID="f4af5057-f6ec-4292-a6c8-709b61140ae4" containerID="a9259cfdedfcaefb678baea96d2fa26299d6f2bf057de38282a8304d2a9cb206" exitCode=0 Dec 11 22:23:29 crc kubenswrapper[4956]: I1211 22:23:29.149611 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tz7r" event={"ID":"f4af5057-f6ec-4292-a6c8-709b61140ae4","Type":"ContainerDied","Data":"a9259cfdedfcaefb678baea96d2fa26299d6f2bf057de38282a8304d2a9cb206"} Dec 11 22:23:30 crc kubenswrapper[4956]: I1211 22:23:30.159535 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tz7r" event={"ID":"f4af5057-f6ec-4292-a6c8-709b61140ae4","Type":"ContainerStarted","Data":"0ff750cf033f523ee15fafcf8d90c19723969f918e416d8d5b303d390318d39f"} Dec 11 22:23:30 crc kubenswrapper[4956]: I1211 22:23:30.183897 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4tz7r" podStartSLOduration=2.634021896 podStartE2EDuration="5.18388005s" podCreationTimestamp="2025-12-11 22:23:25 +0000 UTC" firstStartedPulling="2025-12-11 22:23:27.134273618 +0000 UTC m=+2099.578651768" lastFinishedPulling="2025-12-11 22:23:29.684131772 +0000 UTC m=+2102.128509922" observedRunningTime="2025-12-11 22:23:30.177806036 +0000 UTC m=+2102.622184186" watchObservedRunningTime="2025-12-11 22:23:30.18388005 +0000 UTC m=+2102.628258200" Dec 11 22:23:36 crc kubenswrapper[4956]: I1211 22:23:36.047099 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4tz7r" Dec 11 22:23:36 crc kubenswrapper[4956]: I1211 22:23:36.048399 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4tz7r" Dec 11 22:23:36 crc kubenswrapper[4956]: I1211 22:23:36.126086 4956 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4tz7r" Dec 11 22:23:36 crc kubenswrapper[4956]: I1211 22:23:36.247351 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4tz7r" Dec 11 22:23:36 crc kubenswrapper[4956]: I1211 22:23:36.367525 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4tz7r"] Dec 11 22:23:38 crc kubenswrapper[4956]: I1211 22:23:38.232474 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4tz7r" podUID="f4af5057-f6ec-4292-a6c8-709b61140ae4" containerName="registry-server" containerID="cri-o://0ff750cf033f523ee15fafcf8d90c19723969f918e416d8d5b303d390318d39f" gracePeriod=2 Dec 11 22:23:39 crc kubenswrapper[4956]: I1211 22:23:39.242525 4956 generic.go:334] "Generic (PLEG): container finished" podID="f4af5057-f6ec-4292-a6c8-709b61140ae4" containerID="0ff750cf033f523ee15fafcf8d90c19723969f918e416d8d5b303d390318d39f" exitCode=0 Dec 11 22:23:39 crc kubenswrapper[4956]: I1211 22:23:39.242754 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tz7r" event={"ID":"f4af5057-f6ec-4292-a6c8-709b61140ae4","Type":"ContainerDied","Data":"0ff750cf033f523ee15fafcf8d90c19723969f918e416d8d5b303d390318d39f"} Dec 11 22:23:40 crc kubenswrapper[4956]: I1211 22:23:40.219140 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4tz7r" Dec 11 22:23:40 crc kubenswrapper[4956]: I1211 22:23:40.253326 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tz7r" event={"ID":"f4af5057-f6ec-4292-a6c8-709b61140ae4","Type":"ContainerDied","Data":"2aa844f58c94ddf17e693b6f36652f776297f9b5a3d1a53121fa26071796a9ac"} Dec 11 22:23:40 crc kubenswrapper[4956]: I1211 22:23:40.253393 4956 scope.go:117] "RemoveContainer" containerID="0ff750cf033f523ee15fafcf8d90c19723969f918e416d8d5b303d390318d39f" Dec 11 22:23:40 crc kubenswrapper[4956]: I1211 22:23:40.253401 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4tz7r" Dec 11 22:23:40 crc kubenswrapper[4956]: I1211 22:23:40.275360 4956 scope.go:117] "RemoveContainer" containerID="a9259cfdedfcaefb678baea96d2fa26299d6f2bf057de38282a8304d2a9cb206" Dec 11 22:23:40 crc kubenswrapper[4956]: I1211 22:23:40.292424 4956 scope.go:117] "RemoveContainer" containerID="e9bde9a4acc5f10db7d00122dce559c77d6a48e21be30c6f7dc72778c50a2ef0" Dec 11 22:23:40 crc kubenswrapper[4956]: I1211 22:23:40.328360 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4af5057-f6ec-4292-a6c8-709b61140ae4-catalog-content\") pod \"f4af5057-f6ec-4292-a6c8-709b61140ae4\" (UID: \"f4af5057-f6ec-4292-a6c8-709b61140ae4\") " Dec 11 22:23:40 crc kubenswrapper[4956]: I1211 22:23:40.328506 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9j44b\" (UniqueName: \"kubernetes.io/projected/f4af5057-f6ec-4292-a6c8-709b61140ae4-kube-api-access-9j44b\") pod \"f4af5057-f6ec-4292-a6c8-709b61140ae4\" (UID: \"f4af5057-f6ec-4292-a6c8-709b61140ae4\") " Dec 11 22:23:40 crc kubenswrapper[4956]: I1211 22:23:40.328975 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4af5057-f6ec-4292-a6c8-709b61140ae4-utilities\") pod \"f4af5057-f6ec-4292-a6c8-709b61140ae4\" (UID: \"f4af5057-f6ec-4292-a6c8-709b61140ae4\") " Dec 11 22:23:40 crc kubenswrapper[4956]: I1211 22:23:40.330247 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4af5057-f6ec-4292-a6c8-709b61140ae4-utilities" (OuterVolumeSpecName: "utilities") pod "f4af5057-f6ec-4292-a6c8-709b61140ae4" (UID: "f4af5057-f6ec-4292-a6c8-709b61140ae4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:23:40 crc kubenswrapper[4956]: I1211 22:23:40.331145 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4af5057-f6ec-4292-a6c8-709b61140ae4-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 22:23:40 crc kubenswrapper[4956]: I1211 22:23:40.335064 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4af5057-f6ec-4292-a6c8-709b61140ae4-kube-api-access-9j44b" (OuterVolumeSpecName: "kube-api-access-9j44b") pod "f4af5057-f6ec-4292-a6c8-709b61140ae4" (UID: "f4af5057-f6ec-4292-a6c8-709b61140ae4"). InnerVolumeSpecName "kube-api-access-9j44b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:23:40 crc kubenswrapper[4956]: I1211 22:23:40.380457 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4af5057-f6ec-4292-a6c8-709b61140ae4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f4af5057-f6ec-4292-a6c8-709b61140ae4" (UID: "f4af5057-f6ec-4292-a6c8-709b61140ae4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:23:40 crc kubenswrapper[4956]: I1211 22:23:40.433640 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4af5057-f6ec-4292-a6c8-709b61140ae4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 22:23:40 crc kubenswrapper[4956]: I1211 22:23:40.433691 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9j44b\" (UniqueName: \"kubernetes.io/projected/f4af5057-f6ec-4292-a6c8-709b61140ae4-kube-api-access-9j44b\") on node \"crc\" DevicePath \"\"" Dec 11 22:23:40 crc kubenswrapper[4956]: I1211 22:23:40.590488 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4tz7r"] Dec 11 22:23:40 crc kubenswrapper[4956]: I1211 22:23:40.600155 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4tz7r"] Dec 11 22:23:42 crc kubenswrapper[4956]: I1211 22:23:42.030354 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4af5057-f6ec-4292-a6c8-709b61140ae4" path="/var/lib/kubelet/pods/f4af5057-f6ec-4292-a6c8-709b61140ae4/volumes" Dec 11 22:24:16 crc kubenswrapper[4956]: I1211 22:24:16.888228 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 22:24:16 crc kubenswrapper[4956]: I1211 22:24:16.889270 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 22:24:46 crc kubenswrapper[4956]: I1211 22:24:46.888488 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 22:24:46 crc kubenswrapper[4956]: I1211 22:24:46.889106 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 22:24:47 crc kubenswrapper[4956]: I1211 22:24:47.544011 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ck6fc"] Dec 11 22:24:47 crc kubenswrapper[4956]: E1211 22:24:47.544467 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4af5057-f6ec-4292-a6c8-709b61140ae4" containerName="registry-server" Dec 11 22:24:47 crc kubenswrapper[4956]: I1211 22:24:47.544496 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4af5057-f6ec-4292-a6c8-709b61140ae4" containerName="registry-server" Dec 11 22:24:47 crc kubenswrapper[4956]: E1211 22:24:47.544522 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4af5057-f6ec-4292-a6c8-709b61140ae4" containerName="extract-utilities" Dec 11 22:24:47 crc kubenswrapper[4956]: I1211 22:24:47.544533 4956 
state_mem.go:107] "Deleted CPUSet assignment" podUID="f4af5057-f6ec-4292-a6c8-709b61140ae4" containerName="extract-utilities" Dec 11 22:24:47 crc kubenswrapper[4956]: E1211 22:24:47.544563 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4af5057-f6ec-4292-a6c8-709b61140ae4" containerName="extract-content" Dec 11 22:24:47 crc kubenswrapper[4956]: I1211 22:24:47.544572 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4af5057-f6ec-4292-a6c8-709b61140ae4" containerName="extract-content" Dec 11 22:24:47 crc kubenswrapper[4956]: I1211 22:24:47.544898 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4af5057-f6ec-4292-a6c8-709b61140ae4" containerName="registry-server" Dec 11 22:24:47 crc kubenswrapper[4956]: I1211 22:24:47.547472 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ck6fc" Dec 11 22:24:47 crc kubenswrapper[4956]: I1211 22:24:47.559564 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba5d8d4a-938c-4bee-b999-068c7d040eea-catalog-content\") pod \"certified-operators-ck6fc\" (UID: \"ba5d8d4a-938c-4bee-b999-068c7d040eea\") " pod="openshift-marketplace/certified-operators-ck6fc" Dec 11 22:24:47 crc kubenswrapper[4956]: I1211 22:24:47.559646 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba5d8d4a-938c-4bee-b999-068c7d040eea-utilities\") pod \"certified-operators-ck6fc\" (UID: \"ba5d8d4a-938c-4bee-b999-068c7d040eea\") " pod="openshift-marketplace/certified-operators-ck6fc" Dec 11 22:24:47 crc kubenswrapper[4956]: I1211 22:24:47.559736 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9dpw\" (UniqueName: \"kubernetes.io/projected/ba5d8d4a-938c-4bee-b999-068c7d040eea-kube-api-access-l9dpw\") pod \"certified-operators-ck6fc\" (UID: \"ba5d8d4a-938c-4bee-b999-068c7d040eea\") " pod="openshift-marketplace/certified-operators-ck6fc" Dec 11 22:24:47 crc kubenswrapper[4956]: I1211 22:24:47.563638 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ck6fc"] Dec 11 22:24:47 crc kubenswrapper[4956]: I1211 22:24:47.660507 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9dpw\" (UniqueName: \"kubernetes.io/projected/ba5d8d4a-938c-4bee-b999-068c7d040eea-kube-api-access-l9dpw\") pod \"certified-operators-ck6fc\" (UID: \"ba5d8d4a-938c-4bee-b999-068c7d040eea\") " pod="openshift-marketplace/certified-operators-ck6fc" Dec 11 22:24:47 crc kubenswrapper[4956]: I1211 22:24:47.660646 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba5d8d4a-938c-4bee-b999-068c7d040eea-catalog-content\") pod \"certified-operators-ck6fc\" (UID: \"ba5d8d4a-938c-4bee-b999-068c7d040eea\") " pod="openshift-marketplace/certified-operators-ck6fc" Dec 11 22:24:47 crc kubenswrapper[4956]: I1211 22:24:47.660677 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba5d8d4a-938c-4bee-b999-068c7d040eea-utilities\") pod \"certified-operators-ck6fc\" (UID: \"ba5d8d4a-938c-4bee-b999-068c7d040eea\") " pod="openshift-marketplace/certified-operators-ck6fc" Dec 11 22:24:47 crc 
kubenswrapper[4956]: I1211 22:24:47.663183 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba5d8d4a-938c-4bee-b999-068c7d040eea-utilities\") pod \"certified-operators-ck6fc\" (UID: \"ba5d8d4a-938c-4bee-b999-068c7d040eea\") " pod="openshift-marketplace/certified-operators-ck6fc" Dec 11 22:24:47 crc kubenswrapper[4956]: I1211 22:24:47.663345 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba5d8d4a-938c-4bee-b999-068c7d040eea-catalog-content\") pod \"certified-operators-ck6fc\" (UID: \"ba5d8d4a-938c-4bee-b999-068c7d040eea\") " pod="openshift-marketplace/certified-operators-ck6fc" Dec 11 22:24:47 crc kubenswrapper[4956]: I1211 22:24:47.682217 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9dpw\" (UniqueName: \"kubernetes.io/projected/ba5d8d4a-938c-4bee-b999-068c7d040eea-kube-api-access-l9dpw\") pod \"certified-operators-ck6fc\" (UID: \"ba5d8d4a-938c-4bee-b999-068c7d040eea\") " pod="openshift-marketplace/certified-operators-ck6fc" Dec 11 22:24:47 crc kubenswrapper[4956]: I1211 22:24:47.948871 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ck6fc" Dec 11 22:24:48 crc kubenswrapper[4956]: I1211 22:24:48.411912 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ck6fc"] Dec 11 22:24:48 crc kubenswrapper[4956]: I1211 22:24:48.837640 4956 generic.go:334] "Generic (PLEG): container finished" podID="ba5d8d4a-938c-4bee-b999-068c7d040eea" containerID="496b733239158bb0f7ffdf1ebc8f5c3b4bf0f725d1a01593a50cce798701e97b" exitCode=0 Dec 11 22:24:48 crc kubenswrapper[4956]: I1211 22:24:48.837709 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ck6fc" event={"ID":"ba5d8d4a-938c-4bee-b999-068c7d040eea","Type":"ContainerDied","Data":"496b733239158bb0f7ffdf1ebc8f5c3b4bf0f725d1a01593a50cce798701e97b"} Dec 11 22:24:48 crc kubenswrapper[4956]: I1211 22:24:48.837738 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ck6fc" event={"ID":"ba5d8d4a-938c-4bee-b999-068c7d040eea","Type":"ContainerStarted","Data":"486eacdce1c196ee336dc13916e7e0e6cce3dea2b124b6ec008ffb0a7f630631"} Dec 11 22:24:50 crc kubenswrapper[4956]: I1211 22:24:50.857340 4956 generic.go:334] "Generic (PLEG): container finished" podID="ba5d8d4a-938c-4bee-b999-068c7d040eea" containerID="ab0becdb3dafd0deb4d7c397f84698a4bc1452a71fe662d0a4299ec5b8927350" exitCode=0 Dec 11 22:24:50 crc kubenswrapper[4956]: I1211 22:24:50.857402 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ck6fc" event={"ID":"ba5d8d4a-938c-4bee-b999-068c7d040eea","Type":"ContainerDied","Data":"ab0becdb3dafd0deb4d7c397f84698a4bc1452a71fe662d0a4299ec5b8927350"} Dec 11 22:24:53 crc kubenswrapper[4956]: I1211 22:24:52.883435 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ck6fc" event={"ID":"ba5d8d4a-938c-4bee-b999-068c7d040eea","Type":"ContainerStarted","Data":"cf62e7a5f843984c60df001ec15d6f1ac633429ea8009e9a16aea27721c0d300"} Dec 11 22:24:53 crc kubenswrapper[4956]: I1211 22:24:53.068696 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ck6fc" podStartSLOduration=2.782751958 
podStartE2EDuration="6.068668637s" podCreationTimestamp="2025-12-11 22:24:47 +0000 UTC" firstStartedPulling="2025-12-11 22:24:48.839355929 +0000 UTC m=+2181.283734089" lastFinishedPulling="2025-12-11 22:24:52.125272578 +0000 UTC m=+2184.569650768" observedRunningTime="2025-12-11 22:24:53.0591412 +0000 UTC m=+2185.503519360" watchObservedRunningTime="2025-12-11 22:24:53.068668637 +0000 UTC m=+2185.513046837" Dec 11 22:24:57 crc kubenswrapper[4956]: I1211 22:24:57.950496 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ck6fc" Dec 11 22:24:57 crc kubenswrapper[4956]: I1211 22:24:57.951567 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ck6fc" Dec 11 22:24:58 crc kubenswrapper[4956]: I1211 22:24:58.038958 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ck6fc" Dec 11 22:24:58 crc kubenswrapper[4956]: I1211 22:24:58.992105 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ck6fc" Dec 11 22:24:59 crc kubenswrapper[4956]: I1211 22:24:59.064218 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ck6fc"] Dec 11 22:25:00 crc kubenswrapper[4956]: I1211 22:25:00.955282 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-ck6fc" podUID="ba5d8d4a-938c-4bee-b999-068c7d040eea" containerName="registry-server" containerID="cri-o://cf62e7a5f843984c60df001ec15d6f1ac633429ea8009e9a16aea27721c0d300" gracePeriod=2 Dec 11 22:25:01 crc kubenswrapper[4956]: I1211 22:25:01.967537 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ck6fc" Dec 11 22:25:01 crc kubenswrapper[4956]: I1211 22:25:01.970437 4956 generic.go:334] "Generic (PLEG): container finished" podID="ba5d8d4a-938c-4bee-b999-068c7d040eea" containerID="cf62e7a5f843984c60df001ec15d6f1ac633429ea8009e9a16aea27721c0d300" exitCode=0 Dec 11 22:25:01 crc kubenswrapper[4956]: I1211 22:25:01.970530 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ck6fc" event={"ID":"ba5d8d4a-938c-4bee-b999-068c7d040eea","Type":"ContainerDied","Data":"cf62e7a5f843984c60df001ec15d6f1ac633429ea8009e9a16aea27721c0d300"} Dec 11 22:25:01 crc kubenswrapper[4956]: I1211 22:25:01.970595 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ck6fc" event={"ID":"ba5d8d4a-938c-4bee-b999-068c7d040eea","Type":"ContainerDied","Data":"486eacdce1c196ee336dc13916e7e0e6cce3dea2b124b6ec008ffb0a7f630631"} Dec 11 22:25:01 crc kubenswrapper[4956]: I1211 22:25:01.970636 4956 scope.go:117] "RemoveContainer" containerID="cf62e7a5f843984c60df001ec15d6f1ac633429ea8009e9a16aea27721c0d300" Dec 11 22:25:02 crc kubenswrapper[4956]: I1211 22:25:02.005830 4956 scope.go:117] "RemoveContainer" containerID="ab0becdb3dafd0deb4d7c397f84698a4bc1452a71fe662d0a4299ec5b8927350" Dec 11 22:25:02 crc kubenswrapper[4956]: I1211 22:25:02.034601 4956 scope.go:117] "RemoveContainer" containerID="496b733239158bb0f7ffdf1ebc8f5c3b4bf0f725d1a01593a50cce798701e97b" Dec 11 22:25:02 crc kubenswrapper[4956]: I1211 22:25:02.060259 4956 scope.go:117] "RemoveContainer" containerID="cf62e7a5f843984c60df001ec15d6f1ac633429ea8009e9a16aea27721c0d300" Dec 11 22:25:02 crc kubenswrapper[4956]: E1211 22:25:02.061198 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf62e7a5f843984c60df001ec15d6f1ac633429ea8009e9a16aea27721c0d300\": container with ID starting with cf62e7a5f843984c60df001ec15d6f1ac633429ea8009e9a16aea27721c0d300 not found: ID does not exist" containerID="cf62e7a5f843984c60df001ec15d6f1ac633429ea8009e9a16aea27721c0d300" Dec 11 22:25:02 crc kubenswrapper[4956]: I1211 22:25:02.061258 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf62e7a5f843984c60df001ec15d6f1ac633429ea8009e9a16aea27721c0d300"} err="failed to get container status \"cf62e7a5f843984c60df001ec15d6f1ac633429ea8009e9a16aea27721c0d300\": rpc error: code = NotFound desc = could not find container \"cf62e7a5f843984c60df001ec15d6f1ac633429ea8009e9a16aea27721c0d300\": container with ID starting with cf62e7a5f843984c60df001ec15d6f1ac633429ea8009e9a16aea27721c0d300 not found: ID does not exist" Dec 11 22:25:02 crc kubenswrapper[4956]: I1211 22:25:02.061291 4956 scope.go:117] "RemoveContainer" containerID="ab0becdb3dafd0deb4d7c397f84698a4bc1452a71fe662d0a4299ec5b8927350" Dec 11 22:25:02 crc kubenswrapper[4956]: E1211 22:25:02.061884 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab0becdb3dafd0deb4d7c397f84698a4bc1452a71fe662d0a4299ec5b8927350\": container with ID starting with ab0becdb3dafd0deb4d7c397f84698a4bc1452a71fe662d0a4299ec5b8927350 not found: ID does not exist" containerID="ab0becdb3dafd0deb4d7c397f84698a4bc1452a71fe662d0a4299ec5b8927350" Dec 11 22:25:02 crc kubenswrapper[4956]: I1211 22:25:02.061961 4956 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"ab0becdb3dafd0deb4d7c397f84698a4bc1452a71fe662d0a4299ec5b8927350"} err="failed to get container status \"ab0becdb3dafd0deb4d7c397f84698a4bc1452a71fe662d0a4299ec5b8927350\": rpc error: code = NotFound desc = could not find container \"ab0becdb3dafd0deb4d7c397f84698a4bc1452a71fe662d0a4299ec5b8927350\": container with ID starting with ab0becdb3dafd0deb4d7c397f84698a4bc1452a71fe662d0a4299ec5b8927350 not found: ID does not exist" Dec 11 22:25:02 crc kubenswrapper[4956]: I1211 22:25:02.062007 4956 scope.go:117] "RemoveContainer" containerID="496b733239158bb0f7ffdf1ebc8f5c3b4bf0f725d1a01593a50cce798701e97b" Dec 11 22:25:02 crc kubenswrapper[4956]: E1211 22:25:02.062477 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"496b733239158bb0f7ffdf1ebc8f5c3b4bf0f725d1a01593a50cce798701e97b\": container with ID starting with 496b733239158bb0f7ffdf1ebc8f5c3b4bf0f725d1a01593a50cce798701e97b not found: ID does not exist" containerID="496b733239158bb0f7ffdf1ebc8f5c3b4bf0f725d1a01593a50cce798701e97b" Dec 11 22:25:02 crc kubenswrapper[4956]: I1211 22:25:02.062513 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"496b733239158bb0f7ffdf1ebc8f5c3b4bf0f725d1a01593a50cce798701e97b"} err="failed to get container status \"496b733239158bb0f7ffdf1ebc8f5c3b4bf0f725d1a01593a50cce798701e97b\": rpc error: code = NotFound desc = could not find container \"496b733239158bb0f7ffdf1ebc8f5c3b4bf0f725d1a01593a50cce798701e97b\": container with ID starting with 496b733239158bb0f7ffdf1ebc8f5c3b4bf0f725d1a01593a50cce798701e97b not found: ID does not exist" Dec 11 22:25:02 crc kubenswrapper[4956]: I1211 22:25:02.089340 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba5d8d4a-938c-4bee-b999-068c7d040eea-utilities\") pod \"ba5d8d4a-938c-4bee-b999-068c7d040eea\" (UID: \"ba5d8d4a-938c-4bee-b999-068c7d040eea\") " Dec 11 22:25:02 crc kubenswrapper[4956]: I1211 22:25:02.089491 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9dpw\" (UniqueName: \"kubernetes.io/projected/ba5d8d4a-938c-4bee-b999-068c7d040eea-kube-api-access-l9dpw\") pod \"ba5d8d4a-938c-4bee-b999-068c7d040eea\" (UID: \"ba5d8d4a-938c-4bee-b999-068c7d040eea\") " Dec 11 22:25:02 crc kubenswrapper[4956]: I1211 22:25:02.089595 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba5d8d4a-938c-4bee-b999-068c7d040eea-catalog-content\") pod \"ba5d8d4a-938c-4bee-b999-068c7d040eea\" (UID: \"ba5d8d4a-938c-4bee-b999-068c7d040eea\") " Dec 11 22:25:02 crc kubenswrapper[4956]: I1211 22:25:02.091427 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba5d8d4a-938c-4bee-b999-068c7d040eea-utilities" (OuterVolumeSpecName: "utilities") pod "ba5d8d4a-938c-4bee-b999-068c7d040eea" (UID: "ba5d8d4a-938c-4bee-b999-068c7d040eea"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:25:02 crc kubenswrapper[4956]: I1211 22:25:02.095792 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba5d8d4a-938c-4bee-b999-068c7d040eea-kube-api-access-l9dpw" (OuterVolumeSpecName: "kube-api-access-l9dpw") pod "ba5d8d4a-938c-4bee-b999-068c7d040eea" (UID: "ba5d8d4a-938c-4bee-b999-068c7d040eea"). InnerVolumeSpecName "kube-api-access-l9dpw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:25:02 crc kubenswrapper[4956]: I1211 22:25:02.150517 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba5d8d4a-938c-4bee-b999-068c7d040eea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ba5d8d4a-938c-4bee-b999-068c7d040eea" (UID: "ba5d8d4a-938c-4bee-b999-068c7d040eea"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:25:02 crc kubenswrapper[4956]: I1211 22:25:02.191515 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba5d8d4a-938c-4bee-b999-068c7d040eea-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 22:25:02 crc kubenswrapper[4956]: I1211 22:25:02.191548 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9dpw\" (UniqueName: \"kubernetes.io/projected/ba5d8d4a-938c-4bee-b999-068c7d040eea-kube-api-access-l9dpw\") on node \"crc\" DevicePath \"\"" Dec 11 22:25:02 crc kubenswrapper[4956]: I1211 22:25:02.191560 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba5d8d4a-938c-4bee-b999-068c7d040eea-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 22:25:02 crc kubenswrapper[4956]: I1211 22:25:02.981260 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ck6fc" Dec 11 22:25:03 crc kubenswrapper[4956]: I1211 22:25:03.038398 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ck6fc"] Dec 11 22:25:03 crc kubenswrapper[4956]: I1211 22:25:03.047849 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-ck6fc"] Dec 11 22:25:04 crc kubenswrapper[4956]: I1211 22:25:04.037399 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba5d8d4a-938c-4bee-b999-068c7d040eea" path="/var/lib/kubelet/pods/ba5d8d4a-938c-4bee-b999-068c7d040eea/volumes" Dec 11 22:25:16 crc kubenswrapper[4956]: I1211 22:25:16.888759 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 22:25:16 crc kubenswrapper[4956]: I1211 22:25:16.889559 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 22:25:16 crc kubenswrapper[4956]: I1211 22:25:16.889634 4956 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" Dec 11 22:25:16 crc kubenswrapper[4956]: I1211 22:25:16.890671 4956 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"31a2b79ec593947d626df8fd6abfea728f54de214ebbeb9d2857540713c4aad5"} pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 22:25:16 crc kubenswrapper[4956]: I1211 22:25:16.890819 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" containerID="cri-o://31a2b79ec593947d626df8fd6abfea728f54de214ebbeb9d2857540713c4aad5" gracePeriod=600 Dec 11 22:25:17 crc kubenswrapper[4956]: I1211 22:25:17.128807 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" event={"ID":"cf61c63b-b06c-4f51-add2-aefe57de751a","Type":"ContainerDied","Data":"31a2b79ec593947d626df8fd6abfea728f54de214ebbeb9d2857540713c4aad5"} Dec 11 22:25:17 crc kubenswrapper[4956]: I1211 22:25:17.129117 4956 scope.go:117] "RemoveContainer" containerID="55d6b3d4ad76f275bbb0afacacacc0e4f42c1b3352120437ad1625c9d521caf7" Dec 11 22:25:17 crc kubenswrapper[4956]: I1211 22:25:17.128862 4956 generic.go:334] "Generic (PLEG): container finished" podID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerID="31a2b79ec593947d626df8fd6abfea728f54de214ebbeb9d2857540713c4aad5" exitCode=0 Dec 11 22:25:18 crc kubenswrapper[4956]: I1211 22:25:18.141694 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" 
event={"ID":"cf61c63b-b06c-4f51-add2-aefe57de751a","Type":"ContainerStarted","Data":"5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648"} Dec 11 22:27:46 crc kubenswrapper[4956]: I1211 22:27:46.888413 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 22:27:46 crc kubenswrapper[4956]: I1211 22:27:46.889175 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 22:28:16 crc kubenswrapper[4956]: I1211 22:28:16.888519 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 22:28:16 crc kubenswrapper[4956]: I1211 22:28:16.889197 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 22:28:46 crc kubenswrapper[4956]: I1211 22:28:46.888142 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 22:28:46 crc kubenswrapper[4956]: I1211 22:28:46.888737 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 22:28:46 crc kubenswrapper[4956]: I1211 22:28:46.888811 4956 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" Dec 11 22:28:46 crc kubenswrapper[4956]: I1211 22:28:46.889529 4956 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648"} pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 22:28:46 crc kubenswrapper[4956]: I1211 22:28:46.889588 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" containerID="cri-o://5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648" gracePeriod=600 Dec 11 22:28:47 crc kubenswrapper[4956]: E1211 22:28:47.544023 4956 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:28:48 crc kubenswrapper[4956]: I1211 22:28:48.055980 4956 generic.go:334] "Generic (PLEG): container finished" podID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648" exitCode=0 Dec 11 22:28:48 crc kubenswrapper[4956]: I1211 22:28:48.056038 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" event={"ID":"cf61c63b-b06c-4f51-add2-aefe57de751a","Type":"ContainerDied","Data":"5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648"} Dec 11 22:28:48 crc kubenswrapper[4956]: I1211 22:28:48.056076 4956 scope.go:117] "RemoveContainer" containerID="31a2b79ec593947d626df8fd6abfea728f54de214ebbeb9d2857540713c4aad5" Dec 11 22:28:48 crc kubenswrapper[4956]: I1211 22:28:48.056752 4956 scope.go:117] "RemoveContainer" containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648" Dec 11 22:28:48 crc kubenswrapper[4956]: E1211 22:28:48.057109 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:29:01 crc kubenswrapper[4956]: I1211 22:29:01.020859 4956 scope.go:117] "RemoveContainer" containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648" Dec 11 22:29:01 crc kubenswrapper[4956]: E1211 22:29:01.022530 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:29:13 crc kubenswrapper[4956]: I1211 22:29:13.021462 4956 scope.go:117] "RemoveContainer" containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648" Dec 11 22:29:13 crc kubenswrapper[4956]: E1211 22:29:13.022168 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:29:27 crc kubenswrapper[4956]: I1211 22:29:27.021231 4956 scope.go:117] "RemoveContainer" containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648" Dec 11 22:29:27 crc kubenswrapper[4956]: E1211 22:29:27.022160 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:29:38 crc kubenswrapper[4956]: I1211 22:29:38.032575 4956 scope.go:117] "RemoveContainer" containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648" Dec 11 22:29:38 crc kubenswrapper[4956]: E1211 22:29:38.033860 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:29:46 crc kubenswrapper[4956]: I1211 22:29:46.763156 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-22hfd/must-gather-qncql"] Dec 11 22:29:46 crc kubenswrapper[4956]: E1211 22:29:46.764060 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba5d8d4a-938c-4bee-b999-068c7d040eea" containerName="registry-server" Dec 11 22:29:46 crc kubenswrapper[4956]: I1211 22:29:46.764076 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba5d8d4a-938c-4bee-b999-068c7d040eea" containerName="registry-server" Dec 11 22:29:46 crc kubenswrapper[4956]: E1211 22:29:46.764095 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba5d8d4a-938c-4bee-b999-068c7d040eea" containerName="extract-utilities" Dec 11 22:29:46 crc kubenswrapper[4956]: I1211 22:29:46.764103 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba5d8d4a-938c-4bee-b999-068c7d040eea" containerName="extract-utilities" Dec 11 22:29:46 crc kubenswrapper[4956]: E1211 22:29:46.764129 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba5d8d4a-938c-4bee-b999-068c7d040eea" containerName="extract-content" Dec 11 22:29:46 crc kubenswrapper[4956]: I1211 22:29:46.764137 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba5d8d4a-938c-4bee-b999-068c7d040eea" containerName="extract-content" Dec 11 22:29:46 crc kubenswrapper[4956]: I1211 22:29:46.764311 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba5d8d4a-938c-4bee-b999-068c7d040eea" containerName="registry-server" Dec 11 22:29:46 crc kubenswrapper[4956]: I1211 22:29:46.765303 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-22hfd/must-gather-qncql" Dec 11 22:29:46 crc kubenswrapper[4956]: I1211 22:29:46.774512 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-22hfd"/"default-dockercfg-nw7gd" Dec 11 22:29:46 crc kubenswrapper[4956]: I1211 22:29:46.775132 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-22hfd"/"openshift-service-ca.crt" Dec 11 22:29:46 crc kubenswrapper[4956]: I1211 22:29:46.775818 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-22hfd"/"kube-root-ca.crt" Dec 11 22:29:46 crc kubenswrapper[4956]: I1211 22:29:46.823048 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a6aaddfb-38ee-494d-abaf-4a87a02d6d97-must-gather-output\") pod \"must-gather-qncql\" (UID: \"a6aaddfb-38ee-494d-abaf-4a87a02d6d97\") " pod="openshift-must-gather-22hfd/must-gather-qncql" Dec 11 22:29:46 crc kubenswrapper[4956]: I1211 22:29:46.823303 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5rqf\" (UniqueName: \"kubernetes.io/projected/a6aaddfb-38ee-494d-abaf-4a87a02d6d97-kube-api-access-v5rqf\") pod \"must-gather-qncql\" (UID: \"a6aaddfb-38ee-494d-abaf-4a87a02d6d97\") " pod="openshift-must-gather-22hfd/must-gather-qncql" Dec 11 22:29:46 crc kubenswrapper[4956]: I1211 22:29:46.846514 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-22hfd/must-gather-qncql"] Dec 11 22:29:46 crc kubenswrapper[4956]: I1211 22:29:46.924617 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a6aaddfb-38ee-494d-abaf-4a87a02d6d97-must-gather-output\") pod \"must-gather-qncql\" (UID: \"a6aaddfb-38ee-494d-abaf-4a87a02d6d97\") " pod="openshift-must-gather-22hfd/must-gather-qncql" Dec 11 22:29:46 crc kubenswrapper[4956]: I1211 22:29:46.924716 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5rqf\" (UniqueName: \"kubernetes.io/projected/a6aaddfb-38ee-494d-abaf-4a87a02d6d97-kube-api-access-v5rqf\") pod \"must-gather-qncql\" (UID: \"a6aaddfb-38ee-494d-abaf-4a87a02d6d97\") " pod="openshift-must-gather-22hfd/must-gather-qncql" Dec 11 22:29:46 crc kubenswrapper[4956]: I1211 22:29:46.925100 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a6aaddfb-38ee-494d-abaf-4a87a02d6d97-must-gather-output\") pod \"must-gather-qncql\" (UID: \"a6aaddfb-38ee-494d-abaf-4a87a02d6d97\") " pod="openshift-must-gather-22hfd/must-gather-qncql" Dec 11 22:29:46 crc kubenswrapper[4956]: I1211 22:29:46.945481 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5rqf\" (UniqueName: \"kubernetes.io/projected/a6aaddfb-38ee-494d-abaf-4a87a02d6d97-kube-api-access-v5rqf\") pod \"must-gather-qncql\" (UID: \"a6aaddfb-38ee-494d-abaf-4a87a02d6d97\") " pod="openshift-must-gather-22hfd/must-gather-qncql" Dec 11 22:29:47 crc kubenswrapper[4956]: I1211 22:29:47.091060 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-22hfd/must-gather-qncql" Dec 11 22:29:47 crc kubenswrapper[4956]: I1211 22:29:47.557195 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-22hfd/must-gather-qncql"] Dec 11 22:29:47 crc kubenswrapper[4956]: W1211 22:29:47.566144 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda6aaddfb_38ee_494d_abaf_4a87a02d6d97.slice/crio-7bada7ce42e9dcdfe527389ca23737bb228dbeae190402e5fc6ece98f89bfb74 WatchSource:0}: Error finding container 7bada7ce42e9dcdfe527389ca23737bb228dbeae190402e5fc6ece98f89bfb74: Status 404 returned error can't find the container with id 7bada7ce42e9dcdfe527389ca23737bb228dbeae190402e5fc6ece98f89bfb74 Dec 11 22:29:47 crc kubenswrapper[4956]: I1211 22:29:47.569930 4956 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 22:29:48 crc kubenswrapper[4956]: I1211 22:29:48.537894 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-22hfd/must-gather-qncql" event={"ID":"a6aaddfb-38ee-494d-abaf-4a87a02d6d97","Type":"ContainerStarted","Data":"7bada7ce42e9dcdfe527389ca23737bb228dbeae190402e5fc6ece98f89bfb74"} Dec 11 22:29:51 crc kubenswrapper[4956]: I1211 22:29:51.021387 4956 scope.go:117] "RemoveContainer" containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648" Dec 11 22:29:51 crc kubenswrapper[4956]: E1211 22:29:51.022221 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:29:54 crc kubenswrapper[4956]: I1211 22:29:54.581365 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-22hfd/must-gather-qncql" event={"ID":"a6aaddfb-38ee-494d-abaf-4a87a02d6d97","Type":"ContainerStarted","Data":"76e7aee1684e0fd16d6b1249940b49f426c3adf92af3e805053e7a5c9eef0f04"} Dec 11 22:29:54 crc kubenswrapper[4956]: I1211 22:29:54.581706 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-22hfd/must-gather-qncql" event={"ID":"a6aaddfb-38ee-494d-abaf-4a87a02d6d97","Type":"ContainerStarted","Data":"a3310445f807215bf4148fb1edc9d7f06c3f778fee9365c76a98acbf426ab2c3"} Dec 11 22:29:54 crc kubenswrapper[4956]: I1211 22:29:54.598480 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-22hfd/must-gather-qncql" podStartSLOduration=2.382537164 podStartE2EDuration="8.598461541s" podCreationTimestamp="2025-12-11 22:29:46 +0000 UTC" firstStartedPulling="2025-12-11 22:29:47.56964886 +0000 UTC m=+2480.014027020" lastFinishedPulling="2025-12-11 22:29:53.785573247 +0000 UTC m=+2486.229951397" observedRunningTime="2025-12-11 22:29:54.598451291 +0000 UTC m=+2487.042829441" watchObservedRunningTime="2025-12-11 22:29:54.598461541 +0000 UTC m=+2487.042839691" Dec 11 22:30:00 crc kubenswrapper[4956]: I1211 22:30:00.147030 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424870-tmcrp"] Dec 11 22:30:00 crc kubenswrapper[4956]: I1211 22:30:00.148842 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424870-tmcrp" Dec 11 22:30:00 crc kubenswrapper[4956]: I1211 22:30:00.150873 4956 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 11 22:30:00 crc kubenswrapper[4956]: I1211 22:30:00.151071 4956 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 11 22:30:00 crc kubenswrapper[4956]: I1211 22:30:00.155406 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424870-tmcrp"] Dec 11 22:30:00 crc kubenswrapper[4956]: I1211 22:30:00.261681 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1ba7f0cc-c913-4713-a336-11b45f7b1820-config-volume\") pod \"collect-profiles-29424870-tmcrp\" (UID: \"1ba7f0cc-c913-4713-a336-11b45f7b1820\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424870-tmcrp" Dec 11 22:30:00 crc kubenswrapper[4956]: I1211 22:30:00.261738 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7jzf\" (UniqueName: \"kubernetes.io/projected/1ba7f0cc-c913-4713-a336-11b45f7b1820-kube-api-access-m7jzf\") pod \"collect-profiles-29424870-tmcrp\" (UID: \"1ba7f0cc-c913-4713-a336-11b45f7b1820\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424870-tmcrp" Dec 11 22:30:00 crc kubenswrapper[4956]: I1211 22:30:00.261778 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1ba7f0cc-c913-4713-a336-11b45f7b1820-secret-volume\") pod \"collect-profiles-29424870-tmcrp\" (UID: \"1ba7f0cc-c913-4713-a336-11b45f7b1820\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424870-tmcrp" Dec 11 22:30:00 crc kubenswrapper[4956]: I1211 22:30:00.363997 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1ba7f0cc-c913-4713-a336-11b45f7b1820-config-volume\") pod \"collect-profiles-29424870-tmcrp\" (UID: \"1ba7f0cc-c913-4713-a336-11b45f7b1820\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424870-tmcrp" Dec 11 22:30:00 crc kubenswrapper[4956]: I1211 22:30:00.364099 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7jzf\" (UniqueName: \"kubernetes.io/projected/1ba7f0cc-c913-4713-a336-11b45f7b1820-kube-api-access-m7jzf\") pod \"collect-profiles-29424870-tmcrp\" (UID: \"1ba7f0cc-c913-4713-a336-11b45f7b1820\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424870-tmcrp" Dec 11 22:30:00 crc kubenswrapper[4956]: I1211 22:30:00.364196 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1ba7f0cc-c913-4713-a336-11b45f7b1820-secret-volume\") pod \"collect-profiles-29424870-tmcrp\" (UID: \"1ba7f0cc-c913-4713-a336-11b45f7b1820\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424870-tmcrp" Dec 11 22:30:00 crc kubenswrapper[4956]: I1211 22:30:00.365302 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1ba7f0cc-c913-4713-a336-11b45f7b1820-config-volume\") pod 
\"collect-profiles-29424870-tmcrp\" (UID: \"1ba7f0cc-c913-4713-a336-11b45f7b1820\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424870-tmcrp" Dec 11 22:30:00 crc kubenswrapper[4956]: I1211 22:30:00.371140 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1ba7f0cc-c913-4713-a336-11b45f7b1820-secret-volume\") pod \"collect-profiles-29424870-tmcrp\" (UID: \"1ba7f0cc-c913-4713-a336-11b45f7b1820\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424870-tmcrp" Dec 11 22:30:00 crc kubenswrapper[4956]: I1211 22:30:00.380715 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7jzf\" (UniqueName: \"kubernetes.io/projected/1ba7f0cc-c913-4713-a336-11b45f7b1820-kube-api-access-m7jzf\") pod \"collect-profiles-29424870-tmcrp\" (UID: \"1ba7f0cc-c913-4713-a336-11b45f7b1820\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424870-tmcrp" Dec 11 22:30:00 crc kubenswrapper[4956]: I1211 22:30:00.480529 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424870-tmcrp" Dec 11 22:30:00 crc kubenswrapper[4956]: I1211 22:30:00.934979 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424870-tmcrp"] Dec 11 22:30:01 crc kubenswrapper[4956]: I1211 22:30:01.655589 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424870-tmcrp" event={"ID":"1ba7f0cc-c913-4713-a336-11b45f7b1820","Type":"ContainerStarted","Data":"70995e52fd0d08d0b49233ea89ffde8cb86878b24b19c6f05bb6f71facc10a91"} Dec 11 22:30:03 crc kubenswrapper[4956]: I1211 22:30:03.021785 4956 scope.go:117] "RemoveContainer" containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648" Dec 11 22:30:03 crc kubenswrapper[4956]: E1211 22:30:03.022017 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:30:03 crc kubenswrapper[4956]: I1211 22:30:03.675403 4956 generic.go:334] "Generic (PLEG): container finished" podID="1ba7f0cc-c913-4713-a336-11b45f7b1820" containerID="19cf4e3ce892656c5c6ff829d963c26c4539c81c7294289e67890cdeefad83f6" exitCode=0 Dec 11 22:30:03 crc kubenswrapper[4956]: I1211 22:30:03.675513 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424870-tmcrp" event={"ID":"1ba7f0cc-c913-4713-a336-11b45f7b1820","Type":"ContainerDied","Data":"19cf4e3ce892656c5c6ff829d963c26c4539c81c7294289e67890cdeefad83f6"} Dec 11 22:30:05 crc kubenswrapper[4956]: I1211 22:30:05.006399 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424870-tmcrp" Dec 11 22:30:05 crc kubenswrapper[4956]: I1211 22:30:05.168827 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m7jzf\" (UniqueName: \"kubernetes.io/projected/1ba7f0cc-c913-4713-a336-11b45f7b1820-kube-api-access-m7jzf\") pod \"1ba7f0cc-c913-4713-a336-11b45f7b1820\" (UID: \"1ba7f0cc-c913-4713-a336-11b45f7b1820\") " Dec 11 22:30:05 crc kubenswrapper[4956]: I1211 22:30:05.169001 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1ba7f0cc-c913-4713-a336-11b45f7b1820-config-volume\") pod \"1ba7f0cc-c913-4713-a336-11b45f7b1820\" (UID: \"1ba7f0cc-c913-4713-a336-11b45f7b1820\") " Dec 11 22:30:05 crc kubenswrapper[4956]: I1211 22:30:05.169090 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1ba7f0cc-c913-4713-a336-11b45f7b1820-secret-volume\") pod \"1ba7f0cc-c913-4713-a336-11b45f7b1820\" (UID: \"1ba7f0cc-c913-4713-a336-11b45f7b1820\") " Dec 11 22:30:05 crc kubenswrapper[4956]: I1211 22:30:05.170709 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ba7f0cc-c913-4713-a336-11b45f7b1820-config-volume" (OuterVolumeSpecName: "config-volume") pod "1ba7f0cc-c913-4713-a336-11b45f7b1820" (UID: "1ba7f0cc-c913-4713-a336-11b45f7b1820"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 22:30:05 crc kubenswrapper[4956]: I1211 22:30:05.180347 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ba7f0cc-c913-4713-a336-11b45f7b1820-kube-api-access-m7jzf" (OuterVolumeSpecName: "kube-api-access-m7jzf") pod "1ba7f0cc-c913-4713-a336-11b45f7b1820" (UID: "1ba7f0cc-c913-4713-a336-11b45f7b1820"). InnerVolumeSpecName "kube-api-access-m7jzf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:30:05 crc kubenswrapper[4956]: I1211 22:30:05.190609 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ba7f0cc-c913-4713-a336-11b45f7b1820-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "1ba7f0cc-c913-4713-a336-11b45f7b1820" (UID: "1ba7f0cc-c913-4713-a336-11b45f7b1820"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 22:30:05 crc kubenswrapper[4956]: I1211 22:30:05.271691 4956 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1ba7f0cc-c913-4713-a336-11b45f7b1820-config-volume\") on node \"crc\" DevicePath \"\"" Dec 11 22:30:05 crc kubenswrapper[4956]: I1211 22:30:05.271949 4956 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1ba7f0cc-c913-4713-a336-11b45f7b1820-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 11 22:30:05 crc kubenswrapper[4956]: I1211 22:30:05.271964 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m7jzf\" (UniqueName: \"kubernetes.io/projected/1ba7f0cc-c913-4713-a336-11b45f7b1820-kube-api-access-m7jzf\") on node \"crc\" DevicePath \"\"" Dec 11 22:30:05 crc kubenswrapper[4956]: I1211 22:30:05.705866 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424870-tmcrp" event={"ID":"1ba7f0cc-c913-4713-a336-11b45f7b1820","Type":"ContainerDied","Data":"70995e52fd0d08d0b49233ea89ffde8cb86878b24b19c6f05bb6f71facc10a91"} Dec 11 22:30:05 crc kubenswrapper[4956]: I1211 22:30:05.705904 4956 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="70995e52fd0d08d0b49233ea89ffde8cb86878b24b19c6f05bb6f71facc10a91" Dec 11 22:30:05 crc kubenswrapper[4956]: I1211 22:30:05.705944 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424870-tmcrp" Dec 11 22:30:06 crc kubenswrapper[4956]: I1211 22:30:06.073465 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424825-fsrlm"] Dec 11 22:30:06 crc kubenswrapper[4956]: I1211 22:30:06.078397 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424825-fsrlm"] Dec 11 22:30:08 crc kubenswrapper[4956]: I1211 22:30:08.030141 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b39bb8fb-c84a-48cc-aa65-b992c06a090b" path="/var/lib/kubelet/pods/b39bb8fb-c84a-48cc-aa65-b992c06a090b/volumes" Dec 11 22:30:15 crc kubenswrapper[4956]: I1211 22:30:15.021290 4956 scope.go:117] "RemoveContainer" containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648" Dec 11 22:30:15 crc kubenswrapper[4956]: E1211 22:30:15.022152 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:30:30 crc kubenswrapper[4956]: I1211 22:30:30.021598 4956 scope.go:117] "RemoveContainer" containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648" Dec 11 22:30:30 crc kubenswrapper[4956]: E1211 22:30:30.022330 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:30:30 crc kubenswrapper[4956]: I1211 22:30:30.188523 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm_d1b83c20-2930-4926-9a24-84a05bfe56a9/util/0.log" Dec 11 22:30:30 crc kubenswrapper[4956]: I1211 22:30:30.347349 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm_d1b83c20-2930-4926-9a24-84a05bfe56a9/pull/0.log" Dec 11 22:30:30 crc kubenswrapper[4956]: I1211 22:30:30.358030 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm_d1b83c20-2930-4926-9a24-84a05bfe56a9/util/0.log" Dec 11 22:30:30 crc kubenswrapper[4956]: I1211 22:30:30.364353 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm_d1b83c20-2930-4926-9a24-84a05bfe56a9/pull/0.log" Dec 11 22:30:30 crc kubenswrapper[4956]: I1211 22:30:30.544740 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm_d1b83c20-2930-4926-9a24-84a05bfe56a9/util/0.log" Dec 11 22:30:30 crc kubenswrapper[4956]: I1211 22:30:30.563163 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm_d1b83c20-2930-4926-9a24-84a05bfe56a9/pull/0.log" Dec 11 22:30:30 crc kubenswrapper[4956]: I1211 22:30:30.588013 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_3a98642acbb75807b74490f0bb5474366111a68efb68fa32ceb57fcd40p57cm_d1b83c20-2930-4926-9a24-84a05bfe56a9/extract/0.log" Dec 11 22:30:30 crc kubenswrapper[4956]: I1211 22:30:30.704060 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr_868cfe57-acca-413d-a5fc-3c856d30ac3f/util/0.log" Dec 11 22:30:30 crc kubenswrapper[4956]: I1211 22:30:30.889888 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr_868cfe57-acca-413d-a5fc-3c856d30ac3f/util/0.log" Dec 11 22:30:30 crc kubenswrapper[4956]: I1211 22:30:30.908746 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr_868cfe57-acca-413d-a5fc-3c856d30ac3f/pull/0.log" Dec 11 22:30:30 crc kubenswrapper[4956]: I1211 22:30:30.923380 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr_868cfe57-acca-413d-a5fc-3c856d30ac3f/pull/0.log" Dec 11 22:30:31 crc kubenswrapper[4956]: I1211 22:30:31.058473 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr_868cfe57-acca-413d-a5fc-3c856d30ac3f/pull/0.log" Dec 11 22:30:31 crc kubenswrapper[4956]: I1211 22:30:31.061936 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr_868cfe57-acca-413d-a5fc-3c856d30ac3f/util/0.log" Dec 11 22:30:31 crc kubenswrapper[4956]: I1211 
22:30:31.069839 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_43e5644c60cb869ede2604af8f1e62bf3b57250e28db8bdd45d8e2cb6fgvrfr_868cfe57-acca-413d-a5fc-3c856d30ac3f/extract/0.log" Dec 11 22:30:31 crc kubenswrapper[4956]: I1211 22:30:31.200383 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s_3f33b928-05d6-489c-a9d0-1c23f69a7849/util/0.log" Dec 11 22:30:31 crc kubenswrapper[4956]: I1211 22:30:31.359986 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s_3f33b928-05d6-489c-a9d0-1c23f69a7849/util/0.log" Dec 11 22:30:31 crc kubenswrapper[4956]: I1211 22:30:31.368545 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s_3f33b928-05d6-489c-a9d0-1c23f69a7849/pull/0.log" Dec 11 22:30:31 crc kubenswrapper[4956]: I1211 22:30:31.392182 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s_3f33b928-05d6-489c-a9d0-1c23f69a7849/pull/0.log" Dec 11 22:30:31 crc kubenswrapper[4956]: I1211 22:30:31.523402 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s_3f33b928-05d6-489c-a9d0-1c23f69a7849/util/0.log" Dec 11 22:30:31 crc kubenswrapper[4956]: I1211 22:30:31.535638 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s_3f33b928-05d6-489c-a9d0-1c23f69a7849/pull/0.log" Dec 11 22:30:31 crc kubenswrapper[4956]: I1211 22:30:31.541805 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590zmq8s_3f33b928-05d6-489c-a9d0-1c23f69a7849/extract/0.log" Dec 11 22:30:31 crc kubenswrapper[4956]: I1211 22:30:31.668806 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7_7a89c45b-e195-4012-a532-aa4430a52d63/util/0.log" Dec 11 22:30:31 crc kubenswrapper[4956]: I1211 22:30:31.859910 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7_7a89c45b-e195-4012-a532-aa4430a52d63/util/0.log" Dec 11 22:30:31 crc kubenswrapper[4956]: I1211 22:30:31.865911 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7_7a89c45b-e195-4012-a532-aa4430a52d63/pull/0.log" Dec 11 22:30:31 crc kubenswrapper[4956]: I1211 22:30:31.866294 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7_7a89c45b-e195-4012-a532-aa4430a52d63/pull/0.log" Dec 11 22:30:32 crc kubenswrapper[4956]: I1211 22:30:32.048702 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7_7a89c45b-e195-4012-a532-aa4430a52d63/pull/0.log" Dec 11 22:30:32 crc kubenswrapper[4956]: I1211 22:30:32.053846 4956 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7_7a89c45b-e195-4012-a532-aa4430a52d63/extract/0.log" Dec 11 22:30:32 crc kubenswrapper[4956]: I1211 22:30:32.070880 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a60eba3f147c507a234cb91c07236b96c6864b9bfe0f3df79307df7f1495vr7_7a89c45b-e195-4012-a532-aa4430a52d63/util/0.log" Dec 11 22:30:32 crc kubenswrapper[4956]: I1211 22:30:32.265807 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-5879767bf-gh5wn_7a3464df-42e9-4fb4-94ae-fadc7acc42ea/manager/0.log" Dec 11 22:30:32 crc kubenswrapper[4956]: I1211 22:30:32.310308 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-index-2mpd9_5f81ac27-4de7-4fec-bd58-51936767a898/registry-server/0.log" Dec 11 22:30:32 crc kubenswrapper[4956]: I1211 22:30:32.408446 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82_dfb081fe-bb0f-4b1b-ad99-12a07bf52c29/util/0.log" Dec 11 22:30:32 crc kubenswrapper[4956]: I1211 22:30:32.545860 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82_dfb081fe-bb0f-4b1b-ad99-12a07bf52c29/pull/0.log" Dec 11 22:30:32 crc kubenswrapper[4956]: I1211 22:30:32.565035 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82_dfb081fe-bb0f-4b1b-ad99-12a07bf52c29/util/0.log" Dec 11 22:30:32 crc kubenswrapper[4956]: I1211 22:30:32.574867 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82_dfb081fe-bb0f-4b1b-ad99-12a07bf52c29/pull/0.log" Dec 11 22:30:32 crc kubenswrapper[4956]: I1211 22:30:32.730232 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82_dfb081fe-bb0f-4b1b-ad99-12a07bf52c29/extract/0.log" Dec 11 22:30:32 crc kubenswrapper[4956]: I1211 22:30:32.735968 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82_dfb081fe-bb0f-4b1b-ad99-12a07bf52c29/util/0.log" Dec 11 22:30:32 crc kubenswrapper[4956]: I1211 22:30:32.751520 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bddc449b924cd626b11986a7f08bcf506455bf24815f9811541fc36a9afsg82_dfb081fe-bb0f-4b1b-ad99-12a07bf52c29/pull/0.log" Dec 11 22:30:32 crc kubenswrapper[4956]: I1211 22:30:32.891663 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5_4d696972-48a5-4dd3-8a23-a320ea760628/util/0.log" Dec 11 22:30:33 crc kubenswrapper[4956]: I1211 22:30:33.058621 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5_4d696972-48a5-4dd3-8a23-a320ea760628/util/0.log" Dec 11 22:30:33 crc kubenswrapper[4956]: I1211 22:30:33.085749 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5_4d696972-48a5-4dd3-8a23-a320ea760628/pull/0.log" Dec 11 22:30:33 crc kubenswrapper[4956]: I1211 
22:30:33.133577 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5_4d696972-48a5-4dd3-8a23-a320ea760628/pull/0.log" Dec 11 22:30:33 crc kubenswrapper[4956]: I1211 22:30:33.305237 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5_4d696972-48a5-4dd3-8a23-a320ea760628/util/0.log" Dec 11 22:30:33 crc kubenswrapper[4956]: I1211 22:30:33.305863 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5_4d696972-48a5-4dd3-8a23-a320ea760628/extract/0.log" Dec 11 22:30:33 crc kubenswrapper[4956]: I1211 22:30:33.315075 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c983d374f65f2b6519291d515c946904d120154363674398ca5d68168dk77n5_4d696972-48a5-4dd3-8a23-a320ea760628/pull/0.log" Dec 11 22:30:33 crc kubenswrapper[4956]: I1211 22:30:33.390831 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7b44544c9d-fw298_2c91f172-3850-43c1-b558-d0c87f7e2797/manager/0.log" Dec 11 22:30:33 crc kubenswrapper[4956]: I1211 22:30:33.557731 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-index-2pv8c_fddb0340-f5e5-46e1-8193-236b99c5e859/registry-server/0.log" Dec 11 22:30:33 crc kubenswrapper[4956]: I1211 22:30:33.621014 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-794987f786-cml4t_015cecc7-360c-4399-83c5-f13692a5e145/manager/0.log" Dec 11 22:30:33 crc kubenswrapper[4956]: I1211 22:30:33.729159 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-index-sn9vb_0193dd7c-70b9-4978-b06a-878e08014d5c/registry-server/0.log" Dec 11 22:30:33 crc kubenswrapper[4956]: I1211 22:30:33.768811 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-556fbb47f6-t77m6_e6567d3b-2f90-413a-b7fc-edd972521754/manager/0.log" Dec 11 22:30:33 crc kubenswrapper[4956]: I1211 22:30:33.866637 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-index-2gr5s_e625ed2d-e5fa-44e0-9388-9a3df15fe132/registry-server/0.log" Dec 11 22:30:33 crc kubenswrapper[4956]: I1211 22:30:33.963928 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-779fc9694b-ssktt_33dc9fa8-0749-48a8-a1dd-ac9b438fc6c1/operator/0.log" Dec 11 22:30:34 crc kubenswrapper[4956]: I1211 22:30:34.003214 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-index-nrbcn_c8f134b9-199f-44f5-b6d4-2fd3b7766db7/registry-server/0.log" Dec 11 22:30:34 crc kubenswrapper[4956]: I1211 22:30:34.124829 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-8946f6b66-758s2_7a35e2a8-1fe7-45be-8a6f-c1eab1761b13/manager/0.log" Dec 11 22:30:34 crc kubenswrapper[4956]: I1211 22:30:34.149402 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-index-gg48q_e1af6044-d7ab-46d3-8bc9-cf8a341be1d6/registry-server/0.log" Dec 11 22:30:35 crc kubenswrapper[4956]: I1211 22:30:35.163853 4956 scope.go:117] "RemoveContainer" 
containerID="cb24e1ea9f15362e076ca5767d1f9ead26156ff22e051077c0197e828a3da69d" Dec 11 22:30:41 crc kubenswrapper[4956]: I1211 22:30:41.021545 4956 scope.go:117] "RemoveContainer" containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648" Dec 11 22:30:41 crc kubenswrapper[4956]: E1211 22:30:41.022393 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:30:44 crc kubenswrapper[4956]: I1211 22:30:44.674173 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xv7pd"] Dec 11 22:30:44 crc kubenswrapper[4956]: E1211 22:30:44.674945 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ba7f0cc-c913-4713-a336-11b45f7b1820" containerName="collect-profiles" Dec 11 22:30:44 crc kubenswrapper[4956]: I1211 22:30:44.674961 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ba7f0cc-c913-4713-a336-11b45f7b1820" containerName="collect-profiles" Dec 11 22:30:44 crc kubenswrapper[4956]: I1211 22:30:44.675371 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ba7f0cc-c913-4713-a336-11b45f7b1820" containerName="collect-profiles" Dec 11 22:30:44 crc kubenswrapper[4956]: I1211 22:30:44.677870 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xv7pd" Dec 11 22:30:44 crc kubenswrapper[4956]: I1211 22:30:44.714156 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xv7pd"] Dec 11 22:30:44 crc kubenswrapper[4956]: I1211 22:30:44.789067 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d84882a0-637c-43af-b362-998d4738fb09-catalog-content\") pod \"redhat-operators-xv7pd\" (UID: \"d84882a0-637c-43af-b362-998d4738fb09\") " pod="openshift-marketplace/redhat-operators-xv7pd" Dec 11 22:30:44 crc kubenswrapper[4956]: I1211 22:30:44.789142 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvt59\" (UniqueName: \"kubernetes.io/projected/d84882a0-637c-43af-b362-998d4738fb09-kube-api-access-dvt59\") pod \"redhat-operators-xv7pd\" (UID: \"d84882a0-637c-43af-b362-998d4738fb09\") " pod="openshift-marketplace/redhat-operators-xv7pd" Dec 11 22:30:44 crc kubenswrapper[4956]: I1211 22:30:44.789192 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d84882a0-637c-43af-b362-998d4738fb09-utilities\") pod \"redhat-operators-xv7pd\" (UID: \"d84882a0-637c-43af-b362-998d4738fb09\") " pod="openshift-marketplace/redhat-operators-xv7pd" Dec 11 22:30:44 crc kubenswrapper[4956]: I1211 22:30:44.891053 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvt59\" (UniqueName: \"kubernetes.io/projected/d84882a0-637c-43af-b362-998d4738fb09-kube-api-access-dvt59\") pod \"redhat-operators-xv7pd\" (UID: \"d84882a0-637c-43af-b362-998d4738fb09\") " pod="openshift-marketplace/redhat-operators-xv7pd" Dec 11 22:30:44 crc kubenswrapper[4956]: 
I1211 22:30:44.891141 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d84882a0-637c-43af-b362-998d4738fb09-utilities\") pod \"redhat-operators-xv7pd\" (UID: \"d84882a0-637c-43af-b362-998d4738fb09\") " pod="openshift-marketplace/redhat-operators-xv7pd" Dec 11 22:30:44 crc kubenswrapper[4956]: I1211 22:30:44.891230 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d84882a0-637c-43af-b362-998d4738fb09-catalog-content\") pod \"redhat-operators-xv7pd\" (UID: \"d84882a0-637c-43af-b362-998d4738fb09\") " pod="openshift-marketplace/redhat-operators-xv7pd" Dec 11 22:30:44 crc kubenswrapper[4956]: I1211 22:30:44.891700 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d84882a0-637c-43af-b362-998d4738fb09-catalog-content\") pod \"redhat-operators-xv7pd\" (UID: \"d84882a0-637c-43af-b362-998d4738fb09\") " pod="openshift-marketplace/redhat-operators-xv7pd" Dec 11 22:30:44 crc kubenswrapper[4956]: I1211 22:30:44.892315 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d84882a0-637c-43af-b362-998d4738fb09-utilities\") pod \"redhat-operators-xv7pd\" (UID: \"d84882a0-637c-43af-b362-998d4738fb09\") " pod="openshift-marketplace/redhat-operators-xv7pd" Dec 11 22:30:44 crc kubenswrapper[4956]: I1211 22:30:44.919836 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvt59\" (UniqueName: \"kubernetes.io/projected/d84882a0-637c-43af-b362-998d4738fb09-kube-api-access-dvt59\") pod \"redhat-operators-xv7pd\" (UID: \"d84882a0-637c-43af-b362-998d4738fb09\") " pod="openshift-marketplace/redhat-operators-xv7pd" Dec 11 22:30:45 crc kubenswrapper[4956]: I1211 22:30:45.018492 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xv7pd" Dec 11 22:30:45 crc kubenswrapper[4956]: I1211 22:30:45.259013 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xv7pd"] Dec 11 22:30:45 crc kubenswrapper[4956]: I1211 22:30:45.987453 4956 generic.go:334] "Generic (PLEG): container finished" podID="d84882a0-637c-43af-b362-998d4738fb09" containerID="7606dd24c0cf7e6ce4ba06383577895ffa495496be412b0274e1bd83c1f3edfb" exitCode=0 Dec 11 22:30:45 crc kubenswrapper[4956]: I1211 22:30:45.987757 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xv7pd" event={"ID":"d84882a0-637c-43af-b362-998d4738fb09","Type":"ContainerDied","Data":"7606dd24c0cf7e6ce4ba06383577895ffa495496be412b0274e1bd83c1f3edfb"} Dec 11 22:30:45 crc kubenswrapper[4956]: I1211 22:30:45.987806 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xv7pd" event={"ID":"d84882a0-637c-43af-b362-998d4738fb09","Type":"ContainerStarted","Data":"8453f21363d13265efe6ee92dba4c506913c5649348289731e7154c7f57bf5fa"} Dec 11 22:30:46 crc kubenswrapper[4956]: I1211 22:30:46.996924 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xv7pd" event={"ID":"d84882a0-637c-43af-b362-998d4738fb09","Type":"ContainerStarted","Data":"a0a8d9c3dc14f541dc9cb30ad9496dde7dc0c00d573e6b8718990c843670a652"} Dec 11 22:30:48 crc kubenswrapper[4956]: I1211 22:30:48.006203 4956 generic.go:334] "Generic (PLEG): container finished" podID="d84882a0-637c-43af-b362-998d4738fb09" containerID="a0a8d9c3dc14f541dc9cb30ad9496dde7dc0c00d573e6b8718990c843670a652" exitCode=0 Dec 11 22:30:48 crc kubenswrapper[4956]: I1211 22:30:48.006253 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xv7pd" event={"ID":"d84882a0-637c-43af-b362-998d4738fb09","Type":"ContainerDied","Data":"a0a8d9c3dc14f541dc9cb30ad9496dde7dc0c00d573e6b8718990c843670a652"} Dec 11 22:30:48 crc kubenswrapper[4956]: I1211 22:30:48.431588 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-f2f7k"] Dec 11 22:30:48 crc kubenswrapper[4956]: I1211 22:30:48.433783 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f2f7k" Dec 11 22:30:48 crc kubenswrapper[4956]: I1211 22:30:48.449320 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-f2f7k"] Dec 11 22:30:48 crc kubenswrapper[4956]: I1211 22:30:48.543542 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abf95143-90a0-4f1d-a62b-aad64c96ede4-catalog-content\") pod \"redhat-marketplace-f2f7k\" (UID: \"abf95143-90a0-4f1d-a62b-aad64c96ede4\") " pod="openshift-marketplace/redhat-marketplace-f2f7k" Dec 11 22:30:48 crc kubenswrapper[4956]: I1211 22:30:48.543639 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjnfn\" (UniqueName: \"kubernetes.io/projected/abf95143-90a0-4f1d-a62b-aad64c96ede4-kube-api-access-kjnfn\") pod \"redhat-marketplace-f2f7k\" (UID: \"abf95143-90a0-4f1d-a62b-aad64c96ede4\") " pod="openshift-marketplace/redhat-marketplace-f2f7k" Dec 11 22:30:48 crc kubenswrapper[4956]: I1211 22:30:48.543662 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abf95143-90a0-4f1d-a62b-aad64c96ede4-utilities\") pod \"redhat-marketplace-f2f7k\" (UID: \"abf95143-90a0-4f1d-a62b-aad64c96ede4\") " pod="openshift-marketplace/redhat-marketplace-f2f7k" Dec 11 22:30:48 crc kubenswrapper[4956]: I1211 22:30:48.644897 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abf95143-90a0-4f1d-a62b-aad64c96ede4-catalog-content\") pod \"redhat-marketplace-f2f7k\" (UID: \"abf95143-90a0-4f1d-a62b-aad64c96ede4\") " pod="openshift-marketplace/redhat-marketplace-f2f7k" Dec 11 22:30:48 crc kubenswrapper[4956]: I1211 22:30:48.644997 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjnfn\" (UniqueName: \"kubernetes.io/projected/abf95143-90a0-4f1d-a62b-aad64c96ede4-kube-api-access-kjnfn\") pod \"redhat-marketplace-f2f7k\" (UID: \"abf95143-90a0-4f1d-a62b-aad64c96ede4\") " pod="openshift-marketplace/redhat-marketplace-f2f7k" Dec 11 22:30:48 crc kubenswrapper[4956]: I1211 22:30:48.645018 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abf95143-90a0-4f1d-a62b-aad64c96ede4-utilities\") pod \"redhat-marketplace-f2f7k\" (UID: \"abf95143-90a0-4f1d-a62b-aad64c96ede4\") " pod="openshift-marketplace/redhat-marketplace-f2f7k" Dec 11 22:30:48 crc kubenswrapper[4956]: I1211 22:30:48.645531 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abf95143-90a0-4f1d-a62b-aad64c96ede4-utilities\") pod \"redhat-marketplace-f2f7k\" (UID: \"abf95143-90a0-4f1d-a62b-aad64c96ede4\") " pod="openshift-marketplace/redhat-marketplace-f2f7k" Dec 11 22:30:48 crc kubenswrapper[4956]: I1211 22:30:48.645808 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abf95143-90a0-4f1d-a62b-aad64c96ede4-catalog-content\") pod \"redhat-marketplace-f2f7k\" (UID: \"abf95143-90a0-4f1d-a62b-aad64c96ede4\") " pod="openshift-marketplace/redhat-marketplace-f2f7k" Dec 11 22:30:48 crc kubenswrapper[4956]: I1211 22:30:48.665801 4956 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-kjnfn\" (UniqueName: \"kubernetes.io/projected/abf95143-90a0-4f1d-a62b-aad64c96ede4-kube-api-access-kjnfn\") pod \"redhat-marketplace-f2f7k\" (UID: \"abf95143-90a0-4f1d-a62b-aad64c96ede4\") " pod="openshift-marketplace/redhat-marketplace-f2f7k" Dec 11 22:30:48 crc kubenswrapper[4956]: I1211 22:30:48.812506 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f2f7k" Dec 11 22:30:48 crc kubenswrapper[4956]: I1211 22:30:48.847556 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-wnxdh_2d8a22dd-465c-4327-8d76-782e5d289942/control-plane-machine-set-operator/0.log" Dec 11 22:30:49 crc kubenswrapper[4956]: I1211 22:30:49.096164 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-f2f7k"] Dec 11 22:30:49 crc kubenswrapper[4956]: W1211 22:30:49.103158 4956 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podabf95143_90a0_4f1d_a62b_aad64c96ede4.slice/crio-927c1c79f8b43d8a1d9ff0bc4c2090b0bec01efb5255cb00117973b039ad3720 WatchSource:0}: Error finding container 927c1c79f8b43d8a1d9ff0bc4c2090b0bec01efb5255cb00117973b039ad3720: Status 404 returned error can't find the container with id 927c1c79f8b43d8a1d9ff0bc4c2090b0bec01efb5255cb00117973b039ad3720 Dec 11 22:30:49 crc kubenswrapper[4956]: I1211 22:30:49.145210 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-dkst5_99b3c2ef-0c86-427c-9c97-f4a9221b69b1/kube-rbac-proxy/0.log" Dec 11 22:30:49 crc kubenswrapper[4956]: I1211 22:30:49.293931 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-dkst5_99b3c2ef-0c86-427c-9c97-f4a9221b69b1/machine-api-operator/0.log" Dec 11 22:30:50 crc kubenswrapper[4956]: I1211 22:30:50.033248 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xv7pd" event={"ID":"d84882a0-637c-43af-b362-998d4738fb09","Type":"ContainerStarted","Data":"e15564f3c497bf40a6476b97e0136a6cb62b843d9adfc1f6630a875001365c0a"} Dec 11 22:30:50 crc kubenswrapper[4956]: I1211 22:30:50.033580 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f2f7k" event={"ID":"abf95143-90a0-4f1d-a62b-aad64c96ede4","Type":"ContainerStarted","Data":"7f6859dba2d90b3e45256236d7b72bc177f9fd5a56269b1c5d1e9657c63600b2"} Dec 11 22:30:50 crc kubenswrapper[4956]: I1211 22:30:50.033599 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f2f7k" event={"ID":"abf95143-90a0-4f1d-a62b-aad64c96ede4","Type":"ContainerStarted","Data":"927c1c79f8b43d8a1d9ff0bc4c2090b0bec01efb5255cb00117973b039ad3720"} Dec 11 22:30:50 crc kubenswrapper[4956]: I1211 22:30:50.047169 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xv7pd" podStartSLOduration=2.67309013 podStartE2EDuration="6.047151031s" podCreationTimestamp="2025-12-11 22:30:44 +0000 UTC" firstStartedPulling="2025-12-11 22:30:45.989282223 +0000 UTC m=+2538.433660373" lastFinishedPulling="2025-12-11 22:30:49.363343124 +0000 UTC m=+2541.807721274" observedRunningTime="2025-12-11 22:30:50.044503439 +0000 UTC m=+2542.488881589" watchObservedRunningTime="2025-12-11 22:30:50.047151031 +0000 UTC 
m=+2542.491529181" Dec 11 22:30:52 crc kubenswrapper[4956]: I1211 22:30:52.059081 4956 generic.go:334] "Generic (PLEG): container finished" podID="abf95143-90a0-4f1d-a62b-aad64c96ede4" containerID="7f6859dba2d90b3e45256236d7b72bc177f9fd5a56269b1c5d1e9657c63600b2" exitCode=0 Dec 11 22:30:52 crc kubenswrapper[4956]: I1211 22:30:52.060114 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f2f7k" event={"ID":"abf95143-90a0-4f1d-a62b-aad64c96ede4","Type":"ContainerDied","Data":"7f6859dba2d90b3e45256236d7b72bc177f9fd5a56269b1c5d1e9657c63600b2"} Dec 11 22:30:53 crc kubenswrapper[4956]: I1211 22:30:53.021247 4956 scope.go:117] "RemoveContainer" containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648" Dec 11 22:30:53 crc kubenswrapper[4956]: E1211 22:30:53.021509 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:30:55 crc kubenswrapper[4956]: I1211 22:30:55.019221 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xv7pd" Dec 11 22:30:55 crc kubenswrapper[4956]: I1211 22:30:55.019547 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xv7pd" Dec 11 22:30:56 crc kubenswrapper[4956]: I1211 22:30:56.063408 4956 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-xv7pd" podUID="d84882a0-637c-43af-b362-998d4738fb09" containerName="registry-server" probeResult="failure" output=< Dec 11 22:30:56 crc kubenswrapper[4956]: timeout: failed to connect service ":50051" within 1s Dec 11 22:30:56 crc kubenswrapper[4956]: > Dec 11 22:30:58 crc kubenswrapper[4956]: I1211 22:30:58.102515 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f2f7k" event={"ID":"abf95143-90a0-4f1d-a62b-aad64c96ede4","Type":"ContainerStarted","Data":"e30d2393172e0251ce6ae2a1dc515f123e6544ba0ac417c624a216bb090c2b22"} Dec 11 22:30:59 crc kubenswrapper[4956]: I1211 22:30:59.111439 4956 generic.go:334] "Generic (PLEG): container finished" podID="abf95143-90a0-4f1d-a62b-aad64c96ede4" containerID="e30d2393172e0251ce6ae2a1dc515f123e6544ba0ac417c624a216bb090c2b22" exitCode=0 Dec 11 22:30:59 crc kubenswrapper[4956]: I1211 22:30:59.111508 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f2f7k" event={"ID":"abf95143-90a0-4f1d-a62b-aad64c96ede4","Type":"ContainerDied","Data":"e30d2393172e0251ce6ae2a1dc515f123e6544ba0ac417c624a216bb090c2b22"} Dec 11 22:31:00 crc kubenswrapper[4956]: I1211 22:31:00.121000 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f2f7k" event={"ID":"abf95143-90a0-4f1d-a62b-aad64c96ede4","Type":"ContainerStarted","Data":"af64b949650251ad4ce4ff0173f9369e0abc5bd9af814c6d491230d434142dda"} Dec 11 22:31:00 crc kubenswrapper[4956]: I1211 22:31:00.138398 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-f2f7k" podStartSLOduration=4.593733622 podStartE2EDuration="12.138374036s" 
podCreationTimestamp="2025-12-11 22:30:48 +0000 UTC" firstStartedPulling="2025-12-11 22:30:52.06247036 +0000 UTC m=+2544.506848510" lastFinishedPulling="2025-12-11 22:30:59.607110764 +0000 UTC m=+2552.051488924" observedRunningTime="2025-12-11 22:31:00.13594002 +0000 UTC m=+2552.580318190" watchObservedRunningTime="2025-12-11 22:31:00.138374036 +0000 UTC m=+2552.582752186" Dec 11 22:31:05 crc kubenswrapper[4956]: I1211 22:31:05.070910 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xv7pd" Dec 11 22:31:05 crc kubenswrapper[4956]: I1211 22:31:05.121252 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xv7pd" Dec 11 22:31:05 crc kubenswrapper[4956]: I1211 22:31:05.303134 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xv7pd"] Dec 11 22:31:06 crc kubenswrapper[4956]: I1211 22:31:06.021390 4956 scope.go:117] "RemoveContainer" containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648" Dec 11 22:31:06 crc kubenswrapper[4956]: E1211 22:31:06.021823 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:31:06 crc kubenswrapper[4956]: I1211 22:31:06.176661 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xv7pd" podUID="d84882a0-637c-43af-b362-998d4738fb09" containerName="registry-server" containerID="cri-o://e15564f3c497bf40a6476b97e0136a6cb62b843d9adfc1f6630a875001365c0a" gracePeriod=2 Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.103426 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xv7pd" Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.123203 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d84882a0-637c-43af-b362-998d4738fb09-utilities\") pod \"d84882a0-637c-43af-b362-998d4738fb09\" (UID: \"d84882a0-637c-43af-b362-998d4738fb09\") " Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.123268 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d84882a0-637c-43af-b362-998d4738fb09-catalog-content\") pod \"d84882a0-637c-43af-b362-998d4738fb09\" (UID: \"d84882a0-637c-43af-b362-998d4738fb09\") " Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.123338 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dvt59\" (UniqueName: \"kubernetes.io/projected/d84882a0-637c-43af-b362-998d4738fb09-kube-api-access-dvt59\") pod \"d84882a0-637c-43af-b362-998d4738fb09\" (UID: \"d84882a0-637c-43af-b362-998d4738fb09\") " Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.123865 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d84882a0-637c-43af-b362-998d4738fb09-utilities" (OuterVolumeSpecName: "utilities") pod "d84882a0-637c-43af-b362-998d4738fb09" (UID: "d84882a0-637c-43af-b362-998d4738fb09"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.159574 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d84882a0-637c-43af-b362-998d4738fb09-kube-api-access-dvt59" (OuterVolumeSpecName: "kube-api-access-dvt59") pod "d84882a0-637c-43af-b362-998d4738fb09" (UID: "d84882a0-637c-43af-b362-998d4738fb09"). InnerVolumeSpecName "kube-api-access-dvt59". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.185962 4956 generic.go:334] "Generic (PLEG): container finished" podID="d84882a0-637c-43af-b362-998d4738fb09" containerID="e15564f3c497bf40a6476b97e0136a6cb62b843d9adfc1f6630a875001365c0a" exitCode=0 Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.186010 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xv7pd" Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.186010 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xv7pd" event={"ID":"d84882a0-637c-43af-b362-998d4738fb09","Type":"ContainerDied","Data":"e15564f3c497bf40a6476b97e0136a6cb62b843d9adfc1f6630a875001365c0a"} Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.186052 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xv7pd" event={"ID":"d84882a0-637c-43af-b362-998d4738fb09","Type":"ContainerDied","Data":"8453f21363d13265efe6ee92dba4c506913c5649348289731e7154c7f57bf5fa"} Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.186091 4956 scope.go:117] "RemoveContainer" containerID="e15564f3c497bf40a6476b97e0136a6cb62b843d9adfc1f6630a875001365c0a" Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.203002 4956 scope.go:117] "RemoveContainer" containerID="a0a8d9c3dc14f541dc9cb30ad9496dde7dc0c00d573e6b8718990c843670a652" Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.231658 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d84882a0-637c-43af-b362-998d4738fb09-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.231698 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dvt59\" (UniqueName: \"kubernetes.io/projected/d84882a0-637c-43af-b362-998d4738fb09-kube-api-access-dvt59\") on node \"crc\" DevicePath \"\"" Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.237206 4956 scope.go:117] "RemoveContainer" containerID="7606dd24c0cf7e6ce4ba06383577895ffa495496be412b0274e1bd83c1f3edfb" Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.246720 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d84882a0-637c-43af-b362-998d4738fb09-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d84882a0-637c-43af-b362-998d4738fb09" (UID: "d84882a0-637c-43af-b362-998d4738fb09"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.269569 4956 scope.go:117] "RemoveContainer" containerID="e15564f3c497bf40a6476b97e0136a6cb62b843d9adfc1f6630a875001365c0a" Dec 11 22:31:07 crc kubenswrapper[4956]: E1211 22:31:07.270122 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e15564f3c497bf40a6476b97e0136a6cb62b843d9adfc1f6630a875001365c0a\": container with ID starting with e15564f3c497bf40a6476b97e0136a6cb62b843d9adfc1f6630a875001365c0a not found: ID does not exist" containerID="e15564f3c497bf40a6476b97e0136a6cb62b843d9adfc1f6630a875001365c0a" Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.270198 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e15564f3c497bf40a6476b97e0136a6cb62b843d9adfc1f6630a875001365c0a"} err="failed to get container status \"e15564f3c497bf40a6476b97e0136a6cb62b843d9adfc1f6630a875001365c0a\": rpc error: code = NotFound desc = could not find container \"e15564f3c497bf40a6476b97e0136a6cb62b843d9adfc1f6630a875001365c0a\": container with ID starting with e15564f3c497bf40a6476b97e0136a6cb62b843d9adfc1f6630a875001365c0a not found: ID does not exist" Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.270232 4956 scope.go:117] "RemoveContainer" containerID="a0a8d9c3dc14f541dc9cb30ad9496dde7dc0c00d573e6b8718990c843670a652" Dec 11 22:31:07 crc kubenswrapper[4956]: E1211 22:31:07.270534 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0a8d9c3dc14f541dc9cb30ad9496dde7dc0c00d573e6b8718990c843670a652\": container with ID starting with a0a8d9c3dc14f541dc9cb30ad9496dde7dc0c00d573e6b8718990c843670a652 not found: ID does not exist" containerID="a0a8d9c3dc14f541dc9cb30ad9496dde7dc0c00d573e6b8718990c843670a652" Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.270582 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0a8d9c3dc14f541dc9cb30ad9496dde7dc0c00d573e6b8718990c843670a652"} err="failed to get container status \"a0a8d9c3dc14f541dc9cb30ad9496dde7dc0c00d573e6b8718990c843670a652\": rpc error: code = NotFound desc = could not find container \"a0a8d9c3dc14f541dc9cb30ad9496dde7dc0c00d573e6b8718990c843670a652\": container with ID starting with a0a8d9c3dc14f541dc9cb30ad9496dde7dc0c00d573e6b8718990c843670a652 not found: ID does not exist" Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.270612 4956 scope.go:117] "RemoveContainer" containerID="7606dd24c0cf7e6ce4ba06383577895ffa495496be412b0274e1bd83c1f3edfb" Dec 11 22:31:07 crc kubenswrapper[4956]: E1211 22:31:07.272106 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7606dd24c0cf7e6ce4ba06383577895ffa495496be412b0274e1bd83c1f3edfb\": container with ID starting with 7606dd24c0cf7e6ce4ba06383577895ffa495496be412b0274e1bd83c1f3edfb not found: ID does not exist" containerID="7606dd24c0cf7e6ce4ba06383577895ffa495496be412b0274e1bd83c1f3edfb" Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.272149 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7606dd24c0cf7e6ce4ba06383577895ffa495496be412b0274e1bd83c1f3edfb"} err="failed to get container status \"7606dd24c0cf7e6ce4ba06383577895ffa495496be412b0274e1bd83c1f3edfb\": rpc error: code = NotFound desc = could not 
find container \"7606dd24c0cf7e6ce4ba06383577895ffa495496be412b0274e1bd83c1f3edfb\": container with ID starting with 7606dd24c0cf7e6ce4ba06383577895ffa495496be412b0274e1bd83c1f3edfb not found: ID does not exist" Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.333386 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d84882a0-637c-43af-b362-998d4738fb09-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.518565 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xv7pd"] Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.525150 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xv7pd"] Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.945093 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5bddd4b946-228xh_8a6e6699-b773-4761-8438-23abc4eedb21/kube-rbac-proxy/0.log" Dec 11 22:31:07 crc kubenswrapper[4956]: I1211 22:31:07.951174 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5bddd4b946-228xh_8a6e6699-b773-4761-8438-23abc4eedb21/controller/0.log" Dec 11 22:31:08 crc kubenswrapper[4956]: I1211 22:31:08.032285 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d84882a0-637c-43af-b362-998d4738fb09" path="/var/lib/kubelet/pods/d84882a0-637c-43af-b362-998d4738fb09/volumes" Dec 11 22:31:08 crc kubenswrapper[4956]: I1211 22:31:08.088645 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-98gk5_d6ca2e06-8bbd-43dc-8945-3004713f92cb/cp-frr-files/0.log" Dec 11 22:31:08 crc kubenswrapper[4956]: I1211 22:31:08.262860 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-98gk5_d6ca2e06-8bbd-43dc-8945-3004713f92cb/cp-frr-files/0.log" Dec 11 22:31:08 crc kubenswrapper[4956]: I1211 22:31:08.286830 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-98gk5_d6ca2e06-8bbd-43dc-8945-3004713f92cb/cp-metrics/0.log" Dec 11 22:31:08 crc kubenswrapper[4956]: I1211 22:31:08.287171 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-98gk5_d6ca2e06-8bbd-43dc-8945-3004713f92cb/cp-reloader/0.log" Dec 11 22:31:08 crc kubenswrapper[4956]: I1211 22:31:08.359986 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-98gk5_d6ca2e06-8bbd-43dc-8945-3004713f92cb/cp-reloader/0.log" Dec 11 22:31:08 crc kubenswrapper[4956]: I1211 22:31:08.549919 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-98gk5_d6ca2e06-8bbd-43dc-8945-3004713f92cb/cp-reloader/0.log" Dec 11 22:31:08 crc kubenswrapper[4956]: I1211 22:31:08.574783 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-98gk5_d6ca2e06-8bbd-43dc-8945-3004713f92cb/cp-metrics/0.log" Dec 11 22:31:08 crc kubenswrapper[4956]: I1211 22:31:08.591642 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-98gk5_d6ca2e06-8bbd-43dc-8945-3004713f92cb/cp-frr-files/0.log" Dec 11 22:31:08 crc kubenswrapper[4956]: I1211 22:31:08.597428 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-98gk5_d6ca2e06-8bbd-43dc-8945-3004713f92cb/cp-metrics/0.log" Dec 11 22:31:08 crc kubenswrapper[4956]: I1211 22:31:08.737170 4956 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-98gk5_d6ca2e06-8bbd-43dc-8945-3004713f92cb/cp-frr-files/0.log" Dec 11 22:31:08 crc kubenswrapper[4956]: I1211 22:31:08.777631 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-98gk5_d6ca2e06-8bbd-43dc-8945-3004713f92cb/controller/0.log" Dec 11 22:31:08 crc kubenswrapper[4956]: I1211 22:31:08.778277 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-98gk5_d6ca2e06-8bbd-43dc-8945-3004713f92cb/cp-reloader/0.log" Dec 11 22:31:08 crc kubenswrapper[4956]: I1211 22:31:08.785233 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-98gk5_d6ca2e06-8bbd-43dc-8945-3004713f92cb/cp-metrics/0.log" Dec 11 22:31:08 crc kubenswrapper[4956]: I1211 22:31:08.813556 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-f2f7k" Dec 11 22:31:08 crc kubenswrapper[4956]: I1211 22:31:08.813597 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-f2f7k" Dec 11 22:31:08 crc kubenswrapper[4956]: I1211 22:31:08.856219 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-f2f7k" Dec 11 22:31:08 crc kubenswrapper[4956]: I1211 22:31:08.938260 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-98gk5_d6ca2e06-8bbd-43dc-8945-3004713f92cb/frr-metrics/0.log" Dec 11 22:31:08 crc kubenswrapper[4956]: I1211 22:31:08.939021 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-98gk5_d6ca2e06-8bbd-43dc-8945-3004713f92cb/kube-rbac-proxy-frr/0.log" Dec 11 22:31:08 crc kubenswrapper[4956]: I1211 22:31:08.999853 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-98gk5_d6ca2e06-8bbd-43dc-8945-3004713f92cb/kube-rbac-proxy/0.log" Dec 11 22:31:09 crc kubenswrapper[4956]: I1211 22:31:09.266356 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-f2f7k" Dec 11 22:31:09 crc kubenswrapper[4956]: I1211 22:31:09.363409 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-98gk5_d6ca2e06-8bbd-43dc-8945-3004713f92cb/reloader/0.log" Dec 11 22:31:09 crc kubenswrapper[4956]: I1211 22:31:09.417789 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7784b6fcf-6xxgk_cf42ac15-e428-4c85-a2fc-25819760ec60/frr-k8s-webhook-server/0.log" Dec 11 22:31:09 crc kubenswrapper[4956]: I1211 22:31:09.460448 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-98gk5_d6ca2e06-8bbd-43dc-8945-3004713f92cb/frr/0.log" Dec 11 22:31:09 crc kubenswrapper[4956]: I1211 22:31:09.562390 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-59698c9c66-vrvkn_92356f98-4ebe-4b75-8703-1518fc3ca16a/manager/0.log" Dec 11 22:31:09 crc kubenswrapper[4956]: I1211 22:31:09.673503 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-767fcd8485-zvd7d_5d3dcb2a-f97d-419e-8121-c9c049e3bbad/webhook-server/0.log" Dec 11 22:31:09 crc kubenswrapper[4956]: I1211 22:31:09.780981 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-n474l_31d06e79-18e3-4d0b-a871-365f9f2ee701/kube-rbac-proxy/0.log" Dec 11 22:31:09 crc 
kubenswrapper[4956]: I1211 22:31:09.936442 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-n474l_31d06e79-18e3-4d0b-a871-365f9f2ee701/speaker/0.log" Dec 11 22:31:10 crc kubenswrapper[4956]: I1211 22:31:10.532053 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-f2f7k"] Dec 11 22:31:10 crc kubenswrapper[4956]: I1211 22:31:10.704468 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fzj5l"] Dec 11 22:31:10 crc kubenswrapper[4956]: I1211 22:31:10.704755 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fzj5l" podUID="815f1988-351e-416f-b414-3ed53388a8ae" containerName="registry-server" containerID="cri-o://9a890fbdb372d018b6e38f072610f4018ab24968feb967b0c946ac23e6cf42aa" gracePeriod=2 Dec 11 22:31:12 crc kubenswrapper[4956]: I1211 22:31:12.189923 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fzj5l" Dec 11 22:31:12 crc kubenswrapper[4956]: I1211 22:31:12.231686 4956 generic.go:334] "Generic (PLEG): container finished" podID="815f1988-351e-416f-b414-3ed53388a8ae" containerID="9a890fbdb372d018b6e38f072610f4018ab24968feb967b0c946ac23e6cf42aa" exitCode=0 Dec 11 22:31:12 crc kubenswrapper[4956]: I1211 22:31:12.231754 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fzj5l" event={"ID":"815f1988-351e-416f-b414-3ed53388a8ae","Type":"ContainerDied","Data":"9a890fbdb372d018b6e38f072610f4018ab24968feb967b0c946ac23e6cf42aa"} Dec 11 22:31:12 crc kubenswrapper[4956]: I1211 22:31:12.232055 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fzj5l" event={"ID":"815f1988-351e-416f-b414-3ed53388a8ae","Type":"ContainerDied","Data":"54708f6f5935f07c53c7b3c119ec33da4df5a3f0828f1c5ede4c5efc556e5254"} Dec 11 22:31:12 crc kubenswrapper[4956]: I1211 22:31:12.232085 4956 scope.go:117] "RemoveContainer" containerID="9a890fbdb372d018b6e38f072610f4018ab24968feb967b0c946ac23e6cf42aa" Dec 11 22:31:12 crc kubenswrapper[4956]: I1211 22:31:12.231884 4956 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fzj5l" Dec 11 22:31:12 crc kubenswrapper[4956]: I1211 22:31:12.251077 4956 scope.go:117] "RemoveContainer" containerID="6ea9ff6ae1391d04970683b6e8d3c20e87417933d9d5bf126452d6ebc7ec9d02" Dec 11 22:31:12 crc kubenswrapper[4956]: I1211 22:31:12.272981 4956 scope.go:117] "RemoveContainer" containerID="48360ba03fdf0f0f66c5c431a364a37705a970e388f6618d2272cd7b834975df" Dec 11 22:31:12 crc kubenswrapper[4956]: I1211 22:31:12.302401 4956 scope.go:117] "RemoveContainer" containerID="9a890fbdb372d018b6e38f072610f4018ab24968feb967b0c946ac23e6cf42aa" Dec 11 22:31:12 crc kubenswrapper[4956]: E1211 22:31:12.302943 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a890fbdb372d018b6e38f072610f4018ab24968feb967b0c946ac23e6cf42aa\": container with ID starting with 9a890fbdb372d018b6e38f072610f4018ab24968feb967b0c946ac23e6cf42aa not found: ID does not exist" containerID="9a890fbdb372d018b6e38f072610f4018ab24968feb967b0c946ac23e6cf42aa" Dec 11 22:31:12 crc kubenswrapper[4956]: I1211 22:31:12.303002 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a890fbdb372d018b6e38f072610f4018ab24968feb967b0c946ac23e6cf42aa"} err="failed to get container status \"9a890fbdb372d018b6e38f072610f4018ab24968feb967b0c946ac23e6cf42aa\": rpc error: code = NotFound desc = could not find container \"9a890fbdb372d018b6e38f072610f4018ab24968feb967b0c946ac23e6cf42aa\": container with ID starting with 9a890fbdb372d018b6e38f072610f4018ab24968feb967b0c946ac23e6cf42aa not found: ID does not exist" Dec 11 22:31:12 crc kubenswrapper[4956]: I1211 22:31:12.303034 4956 scope.go:117] "RemoveContainer" containerID="6ea9ff6ae1391d04970683b6e8d3c20e87417933d9d5bf126452d6ebc7ec9d02" Dec 11 22:31:12 crc kubenswrapper[4956]: E1211 22:31:12.303397 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ea9ff6ae1391d04970683b6e8d3c20e87417933d9d5bf126452d6ebc7ec9d02\": container with ID starting with 6ea9ff6ae1391d04970683b6e8d3c20e87417933d9d5bf126452d6ebc7ec9d02 not found: ID does not exist" containerID="6ea9ff6ae1391d04970683b6e8d3c20e87417933d9d5bf126452d6ebc7ec9d02" Dec 11 22:31:12 crc kubenswrapper[4956]: I1211 22:31:12.303423 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ea9ff6ae1391d04970683b6e8d3c20e87417933d9d5bf126452d6ebc7ec9d02"} err="failed to get container status \"6ea9ff6ae1391d04970683b6e8d3c20e87417933d9d5bf126452d6ebc7ec9d02\": rpc error: code = NotFound desc = could not find container \"6ea9ff6ae1391d04970683b6e8d3c20e87417933d9d5bf126452d6ebc7ec9d02\": container with ID starting with 6ea9ff6ae1391d04970683b6e8d3c20e87417933d9d5bf126452d6ebc7ec9d02 not found: ID does not exist" Dec 11 22:31:12 crc kubenswrapper[4956]: I1211 22:31:12.303442 4956 scope.go:117] "RemoveContainer" containerID="48360ba03fdf0f0f66c5c431a364a37705a970e388f6618d2272cd7b834975df" Dec 11 22:31:12 crc kubenswrapper[4956]: E1211 22:31:12.303804 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48360ba03fdf0f0f66c5c431a364a37705a970e388f6618d2272cd7b834975df\": container with ID starting with 48360ba03fdf0f0f66c5c431a364a37705a970e388f6618d2272cd7b834975df not found: ID does not exist" containerID="48360ba03fdf0f0f66c5c431a364a37705a970e388f6618d2272cd7b834975df" 
Dec 11 22:31:12 crc kubenswrapper[4956]: I1211 22:31:12.303828 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48360ba03fdf0f0f66c5c431a364a37705a970e388f6618d2272cd7b834975df"} err="failed to get container status \"48360ba03fdf0f0f66c5c431a364a37705a970e388f6618d2272cd7b834975df\": rpc error: code = NotFound desc = could not find container \"48360ba03fdf0f0f66c5c431a364a37705a970e388f6618d2272cd7b834975df\": container with ID starting with 48360ba03fdf0f0f66c5c431a364a37705a970e388f6618d2272cd7b834975df not found: ID does not exist" Dec 11 22:31:12 crc kubenswrapper[4956]: I1211 22:31:12.306187 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pnxh7\" (UniqueName: \"kubernetes.io/projected/815f1988-351e-416f-b414-3ed53388a8ae-kube-api-access-pnxh7\") pod \"815f1988-351e-416f-b414-3ed53388a8ae\" (UID: \"815f1988-351e-416f-b414-3ed53388a8ae\") " Dec 11 22:31:12 crc kubenswrapper[4956]: I1211 22:31:12.306311 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/815f1988-351e-416f-b414-3ed53388a8ae-catalog-content\") pod \"815f1988-351e-416f-b414-3ed53388a8ae\" (UID: \"815f1988-351e-416f-b414-3ed53388a8ae\") " Dec 11 22:31:12 crc kubenswrapper[4956]: I1211 22:31:12.306443 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/815f1988-351e-416f-b414-3ed53388a8ae-utilities\") pod \"815f1988-351e-416f-b414-3ed53388a8ae\" (UID: \"815f1988-351e-416f-b414-3ed53388a8ae\") " Dec 11 22:31:12 crc kubenswrapper[4956]: I1211 22:31:12.306980 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/815f1988-351e-416f-b414-3ed53388a8ae-utilities" (OuterVolumeSpecName: "utilities") pod "815f1988-351e-416f-b414-3ed53388a8ae" (UID: "815f1988-351e-416f-b414-3ed53388a8ae"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:31:12 crc kubenswrapper[4956]: I1211 22:31:12.311233 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/815f1988-351e-416f-b414-3ed53388a8ae-kube-api-access-pnxh7" (OuterVolumeSpecName: "kube-api-access-pnxh7") pod "815f1988-351e-416f-b414-3ed53388a8ae" (UID: "815f1988-351e-416f-b414-3ed53388a8ae"). InnerVolumeSpecName "kube-api-access-pnxh7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 22:31:12 crc kubenswrapper[4956]: I1211 22:31:12.324169 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/815f1988-351e-416f-b414-3ed53388a8ae-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "815f1988-351e-416f-b414-3ed53388a8ae" (UID: "815f1988-351e-416f-b414-3ed53388a8ae"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 22:31:12 crc kubenswrapper[4956]: I1211 22:31:12.407698 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/815f1988-351e-416f-b414-3ed53388a8ae-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 22:31:12 crc kubenswrapper[4956]: I1211 22:31:12.407735 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pnxh7\" (UniqueName: \"kubernetes.io/projected/815f1988-351e-416f-b414-3ed53388a8ae-kube-api-access-pnxh7\") on node \"crc\" DevicePath \"\"" Dec 11 22:31:12 crc kubenswrapper[4956]: I1211 22:31:12.407746 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/815f1988-351e-416f-b414-3ed53388a8ae-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 22:31:12 crc kubenswrapper[4956]: I1211 22:31:12.564725 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fzj5l"] Dec 11 22:31:12 crc kubenswrapper[4956]: I1211 22:31:12.568734 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fzj5l"] Dec 11 22:31:14 crc kubenswrapper[4956]: I1211 22:31:14.037363 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="815f1988-351e-416f-b414-3ed53388a8ae" path="/var/lib/kubelet/pods/815f1988-351e-416f-b414-3ed53388a8ae/volumes" Dec 11 22:31:20 crc kubenswrapper[4956]: I1211 22:31:20.021718 4956 scope.go:117] "RemoveContainer" containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648" Dec 11 22:31:20 crc kubenswrapper[4956]: E1211 22:31:20.022919 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:31:24 crc kubenswrapper[4956]: I1211 22:31:24.862341 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_barbican-api-84dc9b8b44-tt7zq_3c282539-1074-4e74-bea2-dfe83a575a5c/barbican-api/0.log" Dec 11 22:31:24 crc kubenswrapper[4956]: I1211 22:31:24.898576 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_barbican-api-84dc9b8b44-tt7zq_3c282539-1074-4e74-bea2-dfe83a575a5c/barbican-api-log/0.log" Dec 11 22:31:25 crc kubenswrapper[4956]: I1211 22:31:25.138619 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_barbican-db-sync-s6kdn_8f094fcd-e905-4061-8726-a536b25ddbc7/barbican-db-sync/0.log" Dec 11 22:31:25 crc kubenswrapper[4956]: I1211 22:31:25.224335 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_barbican-keystone-listener-6f89b79686-srzks_496ff541-0e71-4497-89ac-e860c0c5300a/barbican-keystone-listener/0.log" Dec 11 22:31:25 crc kubenswrapper[4956]: I1211 22:31:25.338162 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_barbican-keystone-listener-6f89b79686-srzks_496ff541-0e71-4497-89ac-e860c0c5300a/barbican-keystone-listener-log/0.log" Dec 11 22:31:25 crc kubenswrapper[4956]: I1211 22:31:25.393034 4956 log.go:25] "Finished parsing log file" 
path="/var/log/pods/swift-kuttl-tests_barbican-worker-5549fbbc4f-bvswp_f8e74f81-6253-4f42-89ba-4023bdd47d65/barbican-worker/0.log" Dec 11 22:31:25 crc kubenswrapper[4956]: I1211 22:31:25.406744 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_barbican-worker-5549fbbc4f-bvswp_f8e74f81-6253-4f42-89ba-4023bdd47d65/barbican-worker-log/0.log" Dec 11 22:31:25 crc kubenswrapper[4956]: I1211 22:31:25.905985 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_keystone-85866bffd9-vxw6k_2591ae1a-7dde-47f5-a915-c623aa755a37/keystone-api/0.log" Dec 11 22:31:25 crc kubenswrapper[4956]: I1211 22:31:25.906733 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_openstack-galera-0_bddf5832-7ec1-4c44-a8ff-7c6eae681927/mysql-bootstrap/0.log" Dec 11 22:31:26 crc kubenswrapper[4956]: I1211 22:31:26.079422 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_openstack-galera-0_bddf5832-7ec1-4c44-a8ff-7c6eae681927/mysql-bootstrap/0.log" Dec 11 22:31:26 crc kubenswrapper[4956]: I1211 22:31:26.130087 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_openstack-galera-0_bddf5832-7ec1-4c44-a8ff-7c6eae681927/galera/0.log" Dec 11 22:31:26 crc kubenswrapper[4956]: I1211 22:31:26.290905 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_openstack-galera-1_94805819-605d-47fe-9670-957c387a50fb/mysql-bootstrap/0.log" Dec 11 22:31:26 crc kubenswrapper[4956]: I1211 22:31:26.449217 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_openstack-galera-1_94805819-605d-47fe-9670-957c387a50fb/mysql-bootstrap/0.log" Dec 11 22:31:26 crc kubenswrapper[4956]: I1211 22:31:26.531097 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_openstack-galera-1_94805819-605d-47fe-9670-957c387a50fb/galera/0.log" Dec 11 22:31:26 crc kubenswrapper[4956]: I1211 22:31:26.670353 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_openstack-galera-2_9e73ef6f-6323-44c0-9ae8-b14eda333297/mysql-bootstrap/0.log" Dec 11 22:31:26 crc kubenswrapper[4956]: I1211 22:31:26.855441 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_memcached-0_ebf3438f-f633-4343-afbb-fab7515a880f/memcached/0.log" Dec 11 22:31:26 crc kubenswrapper[4956]: I1211 22:31:26.896447 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_openstack-galera-2_9e73ef6f-6323-44c0-9ae8-b14eda333297/galera/0.log" Dec 11 22:31:26 crc kubenswrapper[4956]: I1211 22:31:26.925963 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_openstack-galera-2_9e73ef6f-6323-44c0-9ae8-b14eda333297/mysql-bootstrap/0.log" Dec 11 22:31:27 crc kubenswrapper[4956]: I1211 22:31:27.034598 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_rabbitmq-server-0_b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7/setup-container/0.log" Dec 11 22:31:27 crc kubenswrapper[4956]: I1211 22:31:27.275910 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_rabbitmq-server-0_b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7/setup-container/0.log" Dec 11 22:31:27 crc kubenswrapper[4956]: I1211 22:31:27.302497 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-proxy-7d4fb88647-dsz49_5c88182b-3428-4ab0-8a8c-939487bcc292/proxy-httpd/0.log" Dec 11 22:31:27 crc kubenswrapper[4956]: I1211 22:31:27.325221 4956 
log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_rabbitmq-server-0_b3a33e0c-0ca4-4df5-9a83-e24f0659c9a7/rabbitmq/0.log" Dec 11 22:31:27 crc kubenswrapper[4956]: I1211 22:31:27.426566 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-proxy-7d4fb88647-dsz49_5c88182b-3428-4ab0-8a8c-939487bcc292/proxy-server/0.log" Dec 11 22:31:27 crc kubenswrapper[4956]: I1211 22:31:27.481107 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-8flfm_d385dbd2-4908-4c42-b48f-20109e20e76f/swift-ring-rebalance/0.log" Dec 11 22:31:27 crc kubenswrapper[4956]: I1211 22:31:27.651852 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-0_4de42e9b-773b-4150-809a-c9255878e80c/account-auditor/0.log" Dec 11 22:31:27 crc kubenswrapper[4956]: I1211 22:31:27.702131 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-0_4de42e9b-773b-4150-809a-c9255878e80c/account-reaper/0.log" Dec 11 22:31:27 crc kubenswrapper[4956]: I1211 22:31:27.714346 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-0_4de42e9b-773b-4150-809a-c9255878e80c/account-replicator/0.log" Dec 11 22:31:27 crc kubenswrapper[4956]: I1211 22:31:27.724315 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-0_4de42e9b-773b-4150-809a-c9255878e80c/account-server/0.log" Dec 11 22:31:27 crc kubenswrapper[4956]: I1211 22:31:27.836871 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-0_4de42e9b-773b-4150-809a-c9255878e80c/container-auditor/0.log" Dec 11 22:31:27 crc kubenswrapper[4956]: I1211 22:31:27.871148 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-0_4de42e9b-773b-4150-809a-c9255878e80c/container-server/0.log" Dec 11 22:31:27 crc kubenswrapper[4956]: I1211 22:31:27.877362 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-0_4de42e9b-773b-4150-809a-c9255878e80c/container-replicator/0.log" Dec 11 22:31:27 crc kubenswrapper[4956]: I1211 22:31:27.930730 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-0_4de42e9b-773b-4150-809a-c9255878e80c/container-updater/0.log" Dec 11 22:31:28 crc kubenswrapper[4956]: I1211 22:31:28.011758 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-0_4de42e9b-773b-4150-809a-c9255878e80c/object-auditor/0.log" Dec 11 22:31:28 crc kubenswrapper[4956]: I1211 22:31:28.037544 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-0_4de42e9b-773b-4150-809a-c9255878e80c/object-replicator/0.log" Dec 11 22:31:28 crc kubenswrapper[4956]: I1211 22:31:28.047216 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-0_4de42e9b-773b-4150-809a-c9255878e80c/object-expirer/0.log" Dec 11 22:31:28 crc kubenswrapper[4956]: I1211 22:31:28.106573 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-0_4de42e9b-773b-4150-809a-c9255878e80c/object-server/0.log" Dec 11 22:31:28 crc kubenswrapper[4956]: I1211 22:31:28.202759 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-0_4de42e9b-773b-4150-809a-c9255878e80c/object-updater/0.log" Dec 11 22:31:28 crc kubenswrapper[4956]: I1211 
22:31:28.226884 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-0_4de42e9b-773b-4150-809a-c9255878e80c/rsync/0.log" Dec 11 22:31:28 crc kubenswrapper[4956]: I1211 22:31:28.239684 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-0_4de42e9b-773b-4150-809a-c9255878e80c/swift-recon-cron/0.log" Dec 11 22:31:28 crc kubenswrapper[4956]: I1211 22:31:28.380249 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-1_6d2691c4-da4b-45cc-9fd4-13c002eb3dd2/account-auditor/0.log" Dec 11 22:31:28 crc kubenswrapper[4956]: I1211 22:31:28.438711 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-1_6d2691c4-da4b-45cc-9fd4-13c002eb3dd2/account-replicator/0.log" Dec 11 22:31:28 crc kubenswrapper[4956]: I1211 22:31:28.438871 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-1_6d2691c4-da4b-45cc-9fd4-13c002eb3dd2/account-reaper/0.log" Dec 11 22:31:28 crc kubenswrapper[4956]: I1211 22:31:28.490337 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-1_6d2691c4-da4b-45cc-9fd4-13c002eb3dd2/account-server/0.log" Dec 11 22:31:28 crc kubenswrapper[4956]: I1211 22:31:28.551581 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-1_6d2691c4-da4b-45cc-9fd4-13c002eb3dd2/container-auditor/0.log" Dec 11 22:31:28 crc kubenswrapper[4956]: I1211 22:31:28.603021 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-1_6d2691c4-da4b-45cc-9fd4-13c002eb3dd2/container-server/0.log" Dec 11 22:31:28 crc kubenswrapper[4956]: I1211 22:31:28.636265 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-1_6d2691c4-da4b-45cc-9fd4-13c002eb3dd2/container-replicator/0.log" Dec 11 22:31:28 crc kubenswrapper[4956]: I1211 22:31:28.678516 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-1_6d2691c4-da4b-45cc-9fd4-13c002eb3dd2/container-updater/0.log" Dec 11 22:31:28 crc kubenswrapper[4956]: I1211 22:31:28.741628 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-1_6d2691c4-da4b-45cc-9fd4-13c002eb3dd2/object-auditor/0.log" Dec 11 22:31:28 crc kubenswrapper[4956]: I1211 22:31:28.751105 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-1_6d2691c4-da4b-45cc-9fd4-13c002eb3dd2/object-expirer/0.log" Dec 11 22:31:28 crc kubenswrapper[4956]: I1211 22:31:28.788487 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-1_6d2691c4-da4b-45cc-9fd4-13c002eb3dd2/object-replicator/0.log" Dec 11 22:31:28 crc kubenswrapper[4956]: I1211 22:31:28.829924 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-1_6d2691c4-da4b-45cc-9fd4-13c002eb3dd2/object-server/0.log" Dec 11 22:31:28 crc kubenswrapper[4956]: I1211 22:31:28.866202 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-1_6d2691c4-da4b-45cc-9fd4-13c002eb3dd2/object-updater/0.log" Dec 11 22:31:28 crc kubenswrapper[4956]: I1211 22:31:28.938611 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-1_6d2691c4-da4b-45cc-9fd4-13c002eb3dd2/rsync/0.log" Dec 11 22:31:28 crc kubenswrapper[4956]: I1211 22:31:28.941295 4956 
log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-1_6d2691c4-da4b-45cc-9fd4-13c002eb3dd2/swift-recon-cron/0.log" Dec 11 22:31:29 crc kubenswrapper[4956]: I1211 22:31:29.000642 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-2_20a57394-ad58-443c-9c3b-4ad74b00cf66/account-auditor/0.log" Dec 11 22:31:29 crc kubenswrapper[4956]: I1211 22:31:29.097623 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-2_20a57394-ad58-443c-9c3b-4ad74b00cf66/account-reaper/0.log" Dec 11 22:31:29 crc kubenswrapper[4956]: I1211 22:31:29.133256 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-2_20a57394-ad58-443c-9c3b-4ad74b00cf66/account-replicator/0.log" Dec 11 22:31:29 crc kubenswrapper[4956]: I1211 22:31:29.177185 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-2_20a57394-ad58-443c-9c3b-4ad74b00cf66/account-server/0.log" Dec 11 22:31:29 crc kubenswrapper[4956]: I1211 22:31:29.222896 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-2_20a57394-ad58-443c-9c3b-4ad74b00cf66/container-auditor/0.log" Dec 11 22:31:29 crc kubenswrapper[4956]: I1211 22:31:29.259637 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-2_20a57394-ad58-443c-9c3b-4ad74b00cf66/container-replicator/0.log" Dec 11 22:31:29 crc kubenswrapper[4956]: I1211 22:31:29.322748 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-2_20a57394-ad58-443c-9c3b-4ad74b00cf66/container-updater/0.log" Dec 11 22:31:29 crc kubenswrapper[4956]: I1211 22:31:29.353006 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-2_20a57394-ad58-443c-9c3b-4ad74b00cf66/container-server/0.log" Dec 11 22:31:29 crc kubenswrapper[4956]: I1211 22:31:29.377927 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-2_20a57394-ad58-443c-9c3b-4ad74b00cf66/object-auditor/0.log" Dec 11 22:31:29 crc kubenswrapper[4956]: I1211 22:31:29.390759 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-2_20a57394-ad58-443c-9c3b-4ad74b00cf66/object-expirer/0.log" Dec 11 22:31:29 crc kubenswrapper[4956]: I1211 22:31:29.462755 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-2_20a57394-ad58-443c-9c3b-4ad74b00cf66/object-replicator/0.log" Dec 11 22:31:29 crc kubenswrapper[4956]: I1211 22:31:29.499104 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-2_20a57394-ad58-443c-9c3b-4ad74b00cf66/object-server/0.log" Dec 11 22:31:29 crc kubenswrapper[4956]: I1211 22:31:29.554797 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-2_20a57394-ad58-443c-9c3b-4ad74b00cf66/object-updater/0.log" Dec 11 22:31:29 crc kubenswrapper[4956]: I1211 22:31:29.568423 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-2_20a57394-ad58-443c-9c3b-4ad74b00cf66/rsync/0.log" Dec 11 22:31:29 crc kubenswrapper[4956]: I1211 22:31:29.602915 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-storage-2_20a57394-ad58-443c-9c3b-4ad74b00cf66/swift-recon-cron/0.log" Dec 11 22:31:35 crc kubenswrapper[4956]: I1211 22:31:35.021322 4956 scope.go:117] 
"RemoveContainer" containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648" Dec 11 22:31:35 crc kubenswrapper[4956]: E1211 22:31:35.022045 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:31:42 crc kubenswrapper[4956]: I1211 22:31:42.425978 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt_b8512a22-f9f2-4250-93b8-c125367cc1ad/util/0.log" Dec 11 22:31:42 crc kubenswrapper[4956]: I1211 22:31:42.547984 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt_b8512a22-f9f2-4250-93b8-c125367cc1ad/util/0.log" Dec 11 22:31:42 crc kubenswrapper[4956]: I1211 22:31:42.569719 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt_b8512a22-f9f2-4250-93b8-c125367cc1ad/pull/0.log" Dec 11 22:31:42 crc kubenswrapper[4956]: I1211 22:31:42.569907 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt_b8512a22-f9f2-4250-93b8-c125367cc1ad/pull/0.log" Dec 11 22:31:42 crc kubenswrapper[4956]: I1211 22:31:42.728845 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt_b8512a22-f9f2-4250-93b8-c125367cc1ad/util/0.log" Dec 11 22:31:42 crc kubenswrapper[4956]: I1211 22:31:42.759060 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt_b8512a22-f9f2-4250-93b8-c125367cc1ad/pull/0.log" Dec 11 22:31:42 crc kubenswrapper[4956]: I1211 22:31:42.763735 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4vrkrt_b8512a22-f9f2-4250-93b8-c125367cc1ad/extract/0.log" Dec 11 22:31:42 crc kubenswrapper[4956]: I1211 22:31:42.941679 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9m6q7_d5287560-f940-4626-9cb6-1b0a16a25a1b/extract-utilities/0.log" Dec 11 22:31:43 crc kubenswrapper[4956]: I1211 22:31:43.045785 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9m6q7_d5287560-f940-4626-9cb6-1b0a16a25a1b/extract-utilities/0.log" Dec 11 22:31:43 crc kubenswrapper[4956]: I1211 22:31:43.045941 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9m6q7_d5287560-f940-4626-9cb6-1b0a16a25a1b/extract-content/0.log" Dec 11 22:31:43 crc kubenswrapper[4956]: I1211 22:31:43.069482 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9m6q7_d5287560-f940-4626-9cb6-1b0a16a25a1b/extract-content/0.log" Dec 11 22:31:43 crc kubenswrapper[4956]: I1211 22:31:43.257414 4956 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-9m6q7_d5287560-f940-4626-9cb6-1b0a16a25a1b/extract-utilities/0.log" Dec 11 22:31:43 crc kubenswrapper[4956]: I1211 22:31:43.263398 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9m6q7_d5287560-f940-4626-9cb6-1b0a16a25a1b/extract-content/0.log" Dec 11 22:31:43 crc kubenswrapper[4956]: I1211 22:31:43.461436 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zlq72_d029043f-7a0a-43fd-a899-ee79724ca7ac/extract-utilities/0.log" Dec 11 22:31:43 crc kubenswrapper[4956]: I1211 22:31:43.669842 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zlq72_d029043f-7a0a-43fd-a899-ee79724ca7ac/extract-content/0.log" Dec 11 22:31:43 crc kubenswrapper[4956]: I1211 22:31:43.672033 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zlq72_d029043f-7a0a-43fd-a899-ee79724ca7ac/extract-utilities/0.log" Dec 11 22:31:43 crc kubenswrapper[4956]: I1211 22:31:43.753352 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zlq72_d029043f-7a0a-43fd-a899-ee79724ca7ac/extract-content/0.log" Dec 11 22:31:43 crc kubenswrapper[4956]: I1211 22:31:43.858680 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9m6q7_d5287560-f940-4626-9cb6-1b0a16a25a1b/registry-server/0.log" Dec 11 22:31:43 crc kubenswrapper[4956]: I1211 22:31:43.929828 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zlq72_d029043f-7a0a-43fd-a899-ee79724ca7ac/extract-utilities/0.log" Dec 11 22:31:43 crc kubenswrapper[4956]: I1211 22:31:43.940799 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zlq72_d029043f-7a0a-43fd-a899-ee79724ca7ac/extract-content/0.log" Dec 11 22:31:44 crc kubenswrapper[4956]: I1211 22:31:44.115505 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-g7wp2_6a02d05c-9762-43bf-8ab5-2e7a1f7695bc/marketplace-operator/0.log" Dec 11 22:31:44 crc kubenswrapper[4956]: I1211 22:31:44.312023 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f2f7k_abf95143-90a0-4f1d-a62b-aad64c96ede4/extract-utilities/0.log" Dec 11 22:31:44 crc kubenswrapper[4956]: I1211 22:31:44.483595 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f2f7k_abf95143-90a0-4f1d-a62b-aad64c96ede4/extract-utilities/0.log" Dec 11 22:31:44 crc kubenswrapper[4956]: I1211 22:31:44.512866 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f2f7k_abf95143-90a0-4f1d-a62b-aad64c96ede4/extract-content/0.log" Dec 11 22:31:44 crc kubenswrapper[4956]: I1211 22:31:44.521388 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f2f7k_abf95143-90a0-4f1d-a62b-aad64c96ede4/extract-content/0.log" Dec 11 22:31:44 crc kubenswrapper[4956]: I1211 22:31:44.694125 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-zlq72_d029043f-7a0a-43fd-a899-ee79724ca7ac/registry-server/0.log" Dec 11 22:31:44 crc kubenswrapper[4956]: I1211 22:31:44.785070 4956 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-f2f7k_abf95143-90a0-4f1d-a62b-aad64c96ede4/extract-content/0.log" Dec 11 22:31:44 crc kubenswrapper[4956]: I1211 22:31:44.785189 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f2f7k_abf95143-90a0-4f1d-a62b-aad64c96ede4/extract-utilities/0.log" Dec 11 22:31:44 crc kubenswrapper[4956]: I1211 22:31:44.814239 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-f2f7k_abf95143-90a0-4f1d-a62b-aad64c96ede4/registry-server/0.log" Dec 11 22:31:44 crc kubenswrapper[4956]: I1211 22:31:44.978995 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7cd6d_3c6c2a49-1562-4731-83ac-680213b5830f/extract-utilities/0.log" Dec 11 22:31:45 crc kubenswrapper[4956]: I1211 22:31:45.116291 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7cd6d_3c6c2a49-1562-4731-83ac-680213b5830f/extract-utilities/0.log" Dec 11 22:31:45 crc kubenswrapper[4956]: I1211 22:31:45.122856 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7cd6d_3c6c2a49-1562-4731-83ac-680213b5830f/extract-content/0.log" Dec 11 22:31:45 crc kubenswrapper[4956]: I1211 22:31:45.128779 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7cd6d_3c6c2a49-1562-4731-83ac-680213b5830f/extract-content/0.log" Dec 11 22:31:45 crc kubenswrapper[4956]: I1211 22:31:45.294389 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7cd6d_3c6c2a49-1562-4731-83ac-680213b5830f/extract-utilities/0.log" Dec 11 22:31:45 crc kubenswrapper[4956]: I1211 22:31:45.304410 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7cd6d_3c6c2a49-1562-4731-83ac-680213b5830f/extract-content/0.log" Dec 11 22:31:45 crc kubenswrapper[4956]: I1211 22:31:45.682572 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7cd6d_3c6c2a49-1562-4731-83ac-680213b5830f/registry-server/0.log" Dec 11 22:31:50 crc kubenswrapper[4956]: I1211 22:31:50.022094 4956 scope.go:117] "RemoveContainer" containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648" Dec 11 22:31:50 crc kubenswrapper[4956]: E1211 22:31:50.022970 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:32:04 crc kubenswrapper[4956]: I1211 22:32:04.026984 4956 scope.go:117] "RemoveContainer" containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648" Dec 11 22:32:04 crc kubenswrapper[4956]: E1211 22:32:04.029206 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" 
podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:32:19 crc kubenswrapper[4956]: I1211 22:32:19.021065 4956 scope.go:117] "RemoveContainer" containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648" Dec 11 22:32:19 crc kubenswrapper[4956]: E1211 22:32:19.021826 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:32:34 crc kubenswrapper[4956]: I1211 22:32:34.021179 4956 scope.go:117] "RemoveContainer" containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648" Dec 11 22:32:34 crc kubenswrapper[4956]: E1211 22:32:34.023387 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:32:46 crc kubenswrapper[4956]: I1211 22:32:46.025879 4956 scope.go:117] "RemoveContainer" containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648" Dec 11 22:32:46 crc kubenswrapper[4956]: E1211 22:32:46.026697 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:32:52 crc kubenswrapper[4956]: I1211 22:32:52.076490 4956 generic.go:334] "Generic (PLEG): container finished" podID="a6aaddfb-38ee-494d-abaf-4a87a02d6d97" containerID="a3310445f807215bf4148fb1edc9d7f06c3f778fee9365c76a98acbf426ab2c3" exitCode=0 Dec 11 22:32:52 crc kubenswrapper[4956]: I1211 22:32:52.076573 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-22hfd/must-gather-qncql" event={"ID":"a6aaddfb-38ee-494d-abaf-4a87a02d6d97","Type":"ContainerDied","Data":"a3310445f807215bf4148fb1edc9d7f06c3f778fee9365c76a98acbf426ab2c3"} Dec 11 22:32:52 crc kubenswrapper[4956]: I1211 22:32:52.077440 4956 scope.go:117] "RemoveContainer" containerID="a3310445f807215bf4148fb1edc9d7f06c3f778fee9365c76a98acbf426ab2c3" Dec 11 22:32:52 crc kubenswrapper[4956]: I1211 22:32:52.421098 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-22hfd_must-gather-qncql_a6aaddfb-38ee-494d-abaf-4a87a02d6d97/gather/0.log" Dec 11 22:32:57 crc kubenswrapper[4956]: I1211 22:32:57.022297 4956 scope.go:117] "RemoveContainer" containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648" Dec 11 22:32:57 crc kubenswrapper[4956]: E1211 22:32:57.023326 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Dec 11 22:32:59 crc kubenswrapper[4956]: I1211 22:32:59.333086 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-22hfd/must-gather-qncql"]
Dec 11 22:32:59 crc kubenswrapper[4956]: I1211 22:32:59.333554 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-22hfd/must-gather-qncql" podUID="a6aaddfb-38ee-494d-abaf-4a87a02d6d97" containerName="copy" containerID="cri-o://76e7aee1684e0fd16d6b1249940b49f426c3adf92af3e805053e7a5c9eef0f04" gracePeriod=2
Dec 11 22:32:59 crc kubenswrapper[4956]: I1211 22:32:59.339611 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-22hfd/must-gather-qncql"]
Dec 11 22:32:59 crc kubenswrapper[4956]: I1211 22:32:59.728606 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-22hfd_must-gather-qncql_a6aaddfb-38ee-494d-abaf-4a87a02d6d97/copy/0.log"
Dec 11 22:32:59 crc kubenswrapper[4956]: I1211 22:32:59.729345 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-22hfd/must-gather-qncql"
Dec 11 22:32:59 crc kubenswrapper[4956]: I1211 22:32:59.874367 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a6aaddfb-38ee-494d-abaf-4a87a02d6d97-must-gather-output\") pod \"a6aaddfb-38ee-494d-abaf-4a87a02d6d97\" (UID: \"a6aaddfb-38ee-494d-abaf-4a87a02d6d97\") "
Dec 11 22:32:59 crc kubenswrapper[4956]: I1211 22:32:59.874428 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v5rqf\" (UniqueName: \"kubernetes.io/projected/a6aaddfb-38ee-494d-abaf-4a87a02d6d97-kube-api-access-v5rqf\") pod \"a6aaddfb-38ee-494d-abaf-4a87a02d6d97\" (UID: \"a6aaddfb-38ee-494d-abaf-4a87a02d6d97\") "
Dec 11 22:32:59 crc kubenswrapper[4956]: I1211 22:32:59.881922 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6aaddfb-38ee-494d-abaf-4a87a02d6d97-kube-api-access-v5rqf" (OuterVolumeSpecName: "kube-api-access-v5rqf") pod "a6aaddfb-38ee-494d-abaf-4a87a02d6d97" (UID: "a6aaddfb-38ee-494d-abaf-4a87a02d6d97"). InnerVolumeSpecName "kube-api-access-v5rqf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 11 22:32:59 crc kubenswrapper[4956]: I1211 22:32:59.976344 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v5rqf\" (UniqueName: \"kubernetes.io/projected/a6aaddfb-38ee-494d-abaf-4a87a02d6d97-kube-api-access-v5rqf\") on node \"crc\" DevicePath \"\""
Dec 11 22:32:59 crc kubenswrapper[4956]: I1211 22:32:59.990534 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6aaddfb-38ee-494d-abaf-4a87a02d6d97-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "a6aaddfb-38ee-494d-abaf-4a87a02d6d97" (UID: "a6aaddfb-38ee-494d-abaf-4a87a02d6d97"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 11 22:33:00 crc kubenswrapper[4956]: I1211 22:33:00.049748 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6aaddfb-38ee-494d-abaf-4a87a02d6d97" path="/var/lib/kubelet/pods/a6aaddfb-38ee-494d-abaf-4a87a02d6d97/volumes"
Dec 11 22:33:00 crc kubenswrapper[4956]: I1211 22:33:00.077730 4956 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a6aaddfb-38ee-494d-abaf-4a87a02d6d97-must-gather-output\") on node \"crc\" DevicePath \"\""
Dec 11 22:33:00 crc kubenswrapper[4956]: I1211 22:33:00.133313 4956 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-22hfd_must-gather-qncql_a6aaddfb-38ee-494d-abaf-4a87a02d6d97/copy/0.log"
Dec 11 22:33:00 crc kubenswrapper[4956]: I1211 22:33:00.133677 4956 generic.go:334] "Generic (PLEG): container finished" podID="a6aaddfb-38ee-494d-abaf-4a87a02d6d97" containerID="76e7aee1684e0fd16d6b1249940b49f426c3adf92af3e805053e7a5c9eef0f04" exitCode=143
Dec 11 22:33:00 crc kubenswrapper[4956]: I1211 22:33:00.133731 4956 scope.go:117] "RemoveContainer" containerID="76e7aee1684e0fd16d6b1249940b49f426c3adf92af3e805053e7a5c9eef0f04"
Dec 11 22:33:00 crc kubenswrapper[4956]: I1211 22:33:00.133757 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-22hfd/must-gather-qncql"
Dec 11 22:33:00 crc kubenswrapper[4956]: I1211 22:33:00.153882 4956 scope.go:117] "RemoveContainer" containerID="a3310445f807215bf4148fb1edc9d7f06c3f778fee9365c76a98acbf426ab2c3"
Dec 11 22:33:00 crc kubenswrapper[4956]: I1211 22:33:00.203395 4956 scope.go:117] "RemoveContainer" containerID="76e7aee1684e0fd16d6b1249940b49f426c3adf92af3e805053e7a5c9eef0f04"
Dec 11 22:33:00 crc kubenswrapper[4956]: E1211 22:33:00.203878 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76e7aee1684e0fd16d6b1249940b49f426c3adf92af3e805053e7a5c9eef0f04\": container with ID starting with 76e7aee1684e0fd16d6b1249940b49f426c3adf92af3e805053e7a5c9eef0f04 not found: ID does not exist" containerID="76e7aee1684e0fd16d6b1249940b49f426c3adf92af3e805053e7a5c9eef0f04"
Dec 11 22:33:00 crc kubenswrapper[4956]: I1211 22:33:00.203913 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76e7aee1684e0fd16d6b1249940b49f426c3adf92af3e805053e7a5c9eef0f04"} err="failed to get container status \"76e7aee1684e0fd16d6b1249940b49f426c3adf92af3e805053e7a5c9eef0f04\": rpc error: code = NotFound desc = could not find container \"76e7aee1684e0fd16d6b1249940b49f426c3adf92af3e805053e7a5c9eef0f04\": container with ID starting with 76e7aee1684e0fd16d6b1249940b49f426c3adf92af3e805053e7a5c9eef0f04 not found: ID does not exist"
Dec 11 22:33:00 crc kubenswrapper[4956]: I1211 22:33:00.203970 4956 scope.go:117] "RemoveContainer" containerID="a3310445f807215bf4148fb1edc9d7f06c3f778fee9365c76a98acbf426ab2c3"
Dec 11 22:33:00 crc kubenswrapper[4956]: E1211 22:33:00.204421 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3310445f807215bf4148fb1edc9d7f06c3f778fee9365c76a98acbf426ab2c3\": container with ID starting with a3310445f807215bf4148fb1edc9d7f06c3f778fee9365c76a98acbf426ab2c3 not found: ID does not exist" containerID="a3310445f807215bf4148fb1edc9d7f06c3f778fee9365c76a98acbf426ab2c3"
Dec 11 22:33:00 crc kubenswrapper[4956]: I1211 22:33:00.204476 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3310445f807215bf4148fb1edc9d7f06c3f778fee9365c76a98acbf426ab2c3"} err="failed to get container status \"a3310445f807215bf4148fb1edc9d7f06c3f778fee9365c76a98acbf426ab2c3\": rpc error: code = NotFound desc = could not find container \"a3310445f807215bf4148fb1edc9d7f06c3f778fee9365c76a98acbf426ab2c3\": container with ID starting with a3310445f807215bf4148fb1edc9d7f06c3f778fee9365c76a98acbf426ab2c3 not found: ID does not exist"
Dec 11 22:33:10 crc kubenswrapper[4956]: I1211 22:33:10.021042 4956 scope.go:117] "RemoveContainer" containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648"
Dec 11 22:33:10 crc kubenswrapper[4956]: E1211 22:33:10.021702 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a"
Dec 11 22:33:21 crc kubenswrapper[4956]: I1211 22:33:21.021275 4956 scope.go:117] "RemoveContainer" containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648"
Dec 11 22:33:21 crc kubenswrapper[4956]: E1211 22:33:21.022537 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a"
Dec 11 22:33:25 crc kubenswrapper[4956]: I1211 22:33:25.587261 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dkcf7"]
Dec 11 22:33:25 crc kubenswrapper[4956]: E1211 22:33:25.588189 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d84882a0-637c-43af-b362-998d4738fb09" containerName="extract-content"
Dec 11 22:33:25 crc kubenswrapper[4956]: I1211 22:33:25.588206 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="d84882a0-637c-43af-b362-998d4738fb09" containerName="extract-content"
Dec 11 22:33:25 crc kubenswrapper[4956]: E1211 22:33:25.588232 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d84882a0-637c-43af-b362-998d4738fb09" containerName="registry-server"
Dec 11 22:33:25 crc kubenswrapper[4956]: I1211 22:33:25.588239 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="d84882a0-637c-43af-b362-998d4738fb09" containerName="registry-server"
Dec 11 22:33:25 crc kubenswrapper[4956]: E1211 22:33:25.588250 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="815f1988-351e-416f-b414-3ed53388a8ae" containerName="extract-utilities"
Dec 11 22:33:25 crc kubenswrapper[4956]: I1211 22:33:25.588259 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="815f1988-351e-416f-b414-3ed53388a8ae" containerName="extract-utilities"
Dec 11 22:33:25 crc kubenswrapper[4956]: E1211 22:33:25.588306 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6aaddfb-38ee-494d-abaf-4a87a02d6d97" containerName="gather"
Dec 11 22:33:25 crc kubenswrapper[4956]: I1211 22:33:25.588313 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6aaddfb-38ee-494d-abaf-4a87a02d6d97" containerName="gather"
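The paired "ContainerStatus from runtime service failed" / "DeleteContainer returned error" entries above are a benign race: the container is already gone when the kubelet re-queries the runtime, and a gRPC NotFound status means there is nothing left to delete. A sketch of that classification using the standard google.golang.org/grpc status API (the container ID here is a placeholder; this is not the kubelet's actual code path):

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// alreadyRemoved reports whether err carries gRPC NotFound, i.e. the
// runtime no longer knows the container and removal is already complete.
func alreadyRemoved(err error) bool {
	s, ok := status.FromError(err)
	return ok && s.Code() == codes.NotFound
}

func main() {
	// Simulated CRI error, shaped like the log entries above.
	err := status.Error(codes.NotFound, `could not find container "deadbeef": ID does not exist`)
	if alreadyRemoved(err) {
		fmt.Println("container already gone; treating delete as a no-op")
	}
}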
assignment" podUID="a6aaddfb-38ee-494d-abaf-4a87a02d6d97" containerName="gather" Dec 11 22:33:25 crc kubenswrapper[4956]: E1211 22:33:25.588324 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="815f1988-351e-416f-b414-3ed53388a8ae" containerName="extract-content" Dec 11 22:33:25 crc kubenswrapper[4956]: I1211 22:33:25.588330 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="815f1988-351e-416f-b414-3ed53388a8ae" containerName="extract-content" Dec 11 22:33:25 crc kubenswrapper[4956]: E1211 22:33:25.588337 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d84882a0-637c-43af-b362-998d4738fb09" containerName="extract-utilities" Dec 11 22:33:25 crc kubenswrapper[4956]: I1211 22:33:25.588344 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="d84882a0-637c-43af-b362-998d4738fb09" containerName="extract-utilities" Dec 11 22:33:25 crc kubenswrapper[4956]: E1211 22:33:25.588355 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6aaddfb-38ee-494d-abaf-4a87a02d6d97" containerName="copy" Dec 11 22:33:25 crc kubenswrapper[4956]: I1211 22:33:25.588361 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6aaddfb-38ee-494d-abaf-4a87a02d6d97" containerName="copy" Dec 11 22:33:25 crc kubenswrapper[4956]: E1211 22:33:25.588373 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="815f1988-351e-416f-b414-3ed53388a8ae" containerName="registry-server" Dec 11 22:33:25 crc kubenswrapper[4956]: I1211 22:33:25.588378 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="815f1988-351e-416f-b414-3ed53388a8ae" containerName="registry-server" Dec 11 22:33:25 crc kubenswrapper[4956]: I1211 22:33:25.588513 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6aaddfb-38ee-494d-abaf-4a87a02d6d97" containerName="copy" Dec 11 22:33:25 crc kubenswrapper[4956]: I1211 22:33:25.588521 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="815f1988-351e-416f-b414-3ed53388a8ae" containerName="registry-server" Dec 11 22:33:25 crc kubenswrapper[4956]: I1211 22:33:25.588528 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="d84882a0-637c-43af-b362-998d4738fb09" containerName="registry-server" Dec 11 22:33:25 crc kubenswrapper[4956]: I1211 22:33:25.588547 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6aaddfb-38ee-494d-abaf-4a87a02d6d97" containerName="gather" Dec 11 22:33:25 crc kubenswrapper[4956]: I1211 22:33:25.591614 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dkcf7" Dec 11 22:33:25 crc kubenswrapper[4956]: I1211 22:33:25.598298 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dkcf7"] Dec 11 22:33:25 crc kubenswrapper[4956]: I1211 22:33:25.650125 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1168193-2146-4e68-afe1-3edded018ce7-utilities\") pod \"community-operators-dkcf7\" (UID: \"b1168193-2146-4e68-afe1-3edded018ce7\") " pod="openshift-marketplace/community-operators-dkcf7" Dec 11 22:33:25 crc kubenswrapper[4956]: I1211 22:33:25.650236 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1168193-2146-4e68-afe1-3edded018ce7-catalog-content\") pod \"community-operators-dkcf7\" (UID: \"b1168193-2146-4e68-afe1-3edded018ce7\") " pod="openshift-marketplace/community-operators-dkcf7" Dec 11 22:33:25 crc kubenswrapper[4956]: I1211 22:33:25.650602 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndmtn\" (UniqueName: \"kubernetes.io/projected/b1168193-2146-4e68-afe1-3edded018ce7-kube-api-access-ndmtn\") pod \"community-operators-dkcf7\" (UID: \"b1168193-2146-4e68-afe1-3edded018ce7\") " pod="openshift-marketplace/community-operators-dkcf7" Dec 11 22:33:25 crc kubenswrapper[4956]: I1211 22:33:25.752081 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1168193-2146-4e68-afe1-3edded018ce7-utilities\") pod \"community-operators-dkcf7\" (UID: \"b1168193-2146-4e68-afe1-3edded018ce7\") " pod="openshift-marketplace/community-operators-dkcf7" Dec 11 22:33:25 crc kubenswrapper[4956]: I1211 22:33:25.752159 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1168193-2146-4e68-afe1-3edded018ce7-catalog-content\") pod \"community-operators-dkcf7\" (UID: \"b1168193-2146-4e68-afe1-3edded018ce7\") " pod="openshift-marketplace/community-operators-dkcf7" Dec 11 22:33:25 crc kubenswrapper[4956]: I1211 22:33:25.752274 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndmtn\" (UniqueName: \"kubernetes.io/projected/b1168193-2146-4e68-afe1-3edded018ce7-kube-api-access-ndmtn\") pod \"community-operators-dkcf7\" (UID: \"b1168193-2146-4e68-afe1-3edded018ce7\") " pod="openshift-marketplace/community-operators-dkcf7" Dec 11 22:33:25 crc kubenswrapper[4956]: I1211 22:33:25.752888 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1168193-2146-4e68-afe1-3edded018ce7-utilities\") pod \"community-operators-dkcf7\" (UID: \"b1168193-2146-4e68-afe1-3edded018ce7\") " pod="openshift-marketplace/community-operators-dkcf7" Dec 11 22:33:25 crc kubenswrapper[4956]: I1211 22:33:25.752940 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1168193-2146-4e68-afe1-3edded018ce7-catalog-content\") pod \"community-operators-dkcf7\" (UID: \"b1168193-2146-4e68-afe1-3edded018ce7\") " pod="openshift-marketplace/community-operators-dkcf7" Dec 11 22:33:25 crc kubenswrapper[4956]: I1211 22:33:25.776606 4956 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-ndmtn\" (UniqueName: \"kubernetes.io/projected/b1168193-2146-4e68-afe1-3edded018ce7-kube-api-access-ndmtn\") pod \"community-operators-dkcf7\" (UID: \"b1168193-2146-4e68-afe1-3edded018ce7\") " pod="openshift-marketplace/community-operators-dkcf7" Dec 11 22:33:25 crc kubenswrapper[4956]: I1211 22:33:25.914252 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dkcf7" Dec 11 22:33:26 crc kubenswrapper[4956]: I1211 22:33:26.401607 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dkcf7"] Dec 11 22:33:27 crc kubenswrapper[4956]: I1211 22:33:27.338585 4956 generic.go:334] "Generic (PLEG): container finished" podID="b1168193-2146-4e68-afe1-3edded018ce7" containerID="5ba1cd26e812365e825f8011c1e703889031993c4c32156ec0fca4515dbdbe36" exitCode=0 Dec 11 22:33:27 crc kubenswrapper[4956]: I1211 22:33:27.338665 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dkcf7" event={"ID":"b1168193-2146-4e68-afe1-3edded018ce7","Type":"ContainerDied","Data":"5ba1cd26e812365e825f8011c1e703889031993c4c32156ec0fca4515dbdbe36"} Dec 11 22:33:27 crc kubenswrapper[4956]: I1211 22:33:27.338949 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dkcf7" event={"ID":"b1168193-2146-4e68-afe1-3edded018ce7","Type":"ContainerStarted","Data":"6358ecc5f5637013e4650b179196e324d6d084266c52c249d444a7a7e7126f9b"} Dec 11 22:33:28 crc kubenswrapper[4956]: I1211 22:33:28.347866 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dkcf7" event={"ID":"b1168193-2146-4e68-afe1-3edded018ce7","Type":"ContainerStarted","Data":"f8e4a16483aa419359f133c133164bdb941a1fda72810e4e278ddaf3b895aaf8"} Dec 11 22:33:29 crc kubenswrapper[4956]: I1211 22:33:29.357742 4956 generic.go:334] "Generic (PLEG): container finished" podID="b1168193-2146-4e68-afe1-3edded018ce7" containerID="f8e4a16483aa419359f133c133164bdb941a1fda72810e4e278ddaf3b895aaf8" exitCode=0 Dec 11 22:33:29 crc kubenswrapper[4956]: I1211 22:33:29.357936 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dkcf7" event={"ID":"b1168193-2146-4e68-afe1-3edded018ce7","Type":"ContainerDied","Data":"f8e4a16483aa419359f133c133164bdb941a1fda72810e4e278ddaf3b895aaf8"} Dec 11 22:33:30 crc kubenswrapper[4956]: I1211 22:33:30.367266 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dkcf7" event={"ID":"b1168193-2146-4e68-afe1-3edded018ce7","Type":"ContainerStarted","Data":"e13671d2407e5e463e456e2a53aaa7a825c9fed5400955b74aa4604fdd046069"} Dec 11 22:33:30 crc kubenswrapper[4956]: I1211 22:33:30.389195 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dkcf7" podStartSLOduration=2.670252489 podStartE2EDuration="5.389175761s" podCreationTimestamp="2025-12-11 22:33:25 +0000 UTC" firstStartedPulling="2025-12-11 22:33:27.341294622 +0000 UTC m=+2699.785672782" lastFinishedPulling="2025-12-11 22:33:30.060217904 +0000 UTC m=+2702.504596054" observedRunningTime="2025-12-11 22:33:30.388880133 +0000 UTC m=+2702.833258313" watchObservedRunningTime="2025-12-11 22:33:30.389175761 +0000 UTC m=+2702.833553911" Dec 11 22:33:35 crc kubenswrapper[4956]: I1211 22:33:35.022689 4956 scope.go:117] "RemoveContainer" 
containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648" Dec 11 22:33:35 crc kubenswrapper[4956]: E1211 22:33:35.023580 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:33:35 crc kubenswrapper[4956]: I1211 22:33:35.914919 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dkcf7" Dec 11 22:33:35 crc kubenswrapper[4956]: I1211 22:33:35.914969 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dkcf7" Dec 11 22:33:35 crc kubenswrapper[4956]: I1211 22:33:35.975909 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dkcf7" Dec 11 22:33:36 crc kubenswrapper[4956]: I1211 22:33:36.453869 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dkcf7" Dec 11 22:33:44 crc kubenswrapper[4956]: I1211 22:33:44.365579 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dkcf7"] Dec 11 22:33:44 crc kubenswrapper[4956]: I1211 22:33:44.367063 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dkcf7" podUID="b1168193-2146-4e68-afe1-3edded018ce7" containerName="registry-server" containerID="cri-o://e13671d2407e5e463e456e2a53aaa7a825c9fed5400955b74aa4604fdd046069" gracePeriod=2 Dec 11 22:33:45 crc kubenswrapper[4956]: E1211 22:33:45.915412 4956 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e13671d2407e5e463e456e2a53aaa7a825c9fed5400955b74aa4604fdd046069 is running failed: container process not found" containerID="e13671d2407e5e463e456e2a53aaa7a825c9fed5400955b74aa4604fdd046069" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 22:33:45 crc kubenswrapper[4956]: E1211 22:33:45.916284 4956 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e13671d2407e5e463e456e2a53aaa7a825c9fed5400955b74aa4604fdd046069 is running failed: container process not found" containerID="e13671d2407e5e463e456e2a53aaa7a825c9fed5400955b74aa4604fdd046069" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 22:33:45 crc kubenswrapper[4956]: E1211 22:33:45.916604 4956 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e13671d2407e5e463e456e2a53aaa7a825c9fed5400955b74aa4604fdd046069 is running failed: container process not found" containerID="e13671d2407e5e463e456e2a53aaa7a825c9fed5400955b74aa4604fdd046069" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 22:33:45 crc kubenswrapper[4956]: E1211 22:33:45.916626 4956 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e13671d2407e5e463e456e2a53aaa7a825c9fed5400955b74aa4604fdd046069 is running failed: container process not found" 
probeType="Readiness" pod="openshift-marketplace/community-operators-dkcf7" podUID="b1168193-2146-4e68-afe1-3edded018ce7" containerName="registry-server" Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.021122 4956 scope.go:117] "RemoveContainer" containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648" Dec 11 22:33:47 crc kubenswrapper[4956]: E1211 22:33:47.021474 4956 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-h6mx2_openshift-machine-config-operator(cf61c63b-b06c-4f51-add2-aefe57de751a)\"" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.489473 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dkcf7" Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.504407 4956 generic.go:334] "Generic (PLEG): container finished" podID="b1168193-2146-4e68-afe1-3edded018ce7" containerID="e13671d2407e5e463e456e2a53aaa7a825c9fed5400955b74aa4604fdd046069" exitCode=0 Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.504458 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dkcf7" event={"ID":"b1168193-2146-4e68-afe1-3edded018ce7","Type":"ContainerDied","Data":"e13671d2407e5e463e456e2a53aaa7a825c9fed5400955b74aa4604fdd046069"} Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.504471 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dkcf7" Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.504489 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dkcf7" event={"ID":"b1168193-2146-4e68-afe1-3edded018ce7","Type":"ContainerDied","Data":"6358ecc5f5637013e4650b179196e324d6d084266c52c249d444a7a7e7126f9b"} Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.504521 4956 scope.go:117] "RemoveContainer" containerID="e13671d2407e5e463e456e2a53aaa7a825c9fed5400955b74aa4604fdd046069" Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.561631 4956 scope.go:117] "RemoveContainer" containerID="f8e4a16483aa419359f133c133164bdb941a1fda72810e4e278ddaf3b895aaf8" Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.574271 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1168193-2146-4e68-afe1-3edded018ce7-utilities\") pod \"b1168193-2146-4e68-afe1-3edded018ce7\" (UID: \"b1168193-2146-4e68-afe1-3edded018ce7\") " Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.574411 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ndmtn\" (UniqueName: \"kubernetes.io/projected/b1168193-2146-4e68-afe1-3edded018ce7-kube-api-access-ndmtn\") pod \"b1168193-2146-4e68-afe1-3edded018ce7\" (UID: \"b1168193-2146-4e68-afe1-3edded018ce7\") " Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.574445 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1168193-2146-4e68-afe1-3edded018ce7-catalog-content\") pod \"b1168193-2146-4e68-afe1-3edded018ce7\" (UID: \"b1168193-2146-4e68-afe1-3edded018ce7\") " Dec 11 
Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.580451 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1168193-2146-4e68-afe1-3edded018ce7-kube-api-access-ndmtn" (OuterVolumeSpecName: "kube-api-access-ndmtn") pod "b1168193-2146-4e68-afe1-3edded018ce7" (UID: "b1168193-2146-4e68-afe1-3edded018ce7"). InnerVolumeSpecName "kube-api-access-ndmtn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.581730 4956 scope.go:117] "RemoveContainer" containerID="5ba1cd26e812365e825f8011c1e703889031993c4c32156ec0fca4515dbdbe36"
Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.585822 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1168193-2146-4e68-afe1-3edded018ce7-utilities" (OuterVolumeSpecName: "utilities") pod "b1168193-2146-4e68-afe1-3edded018ce7" (UID: "b1168193-2146-4e68-afe1-3edded018ce7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.633544 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1168193-2146-4e68-afe1-3edded018ce7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b1168193-2146-4e68-afe1-3edded018ce7" (UID: "b1168193-2146-4e68-afe1-3edded018ce7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.637973 4956 scope.go:117] "RemoveContainer" containerID="e13671d2407e5e463e456e2a53aaa7a825c9fed5400955b74aa4604fdd046069"
Dec 11 22:33:47 crc kubenswrapper[4956]: E1211 22:33:47.638358 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e13671d2407e5e463e456e2a53aaa7a825c9fed5400955b74aa4604fdd046069\": container with ID starting with e13671d2407e5e463e456e2a53aaa7a825c9fed5400955b74aa4604fdd046069 not found: ID does not exist" containerID="e13671d2407e5e463e456e2a53aaa7a825c9fed5400955b74aa4604fdd046069"
Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.638404 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e13671d2407e5e463e456e2a53aaa7a825c9fed5400955b74aa4604fdd046069"} err="failed to get container status \"e13671d2407e5e463e456e2a53aaa7a825c9fed5400955b74aa4604fdd046069\": rpc error: code = NotFound desc = could not find container \"e13671d2407e5e463e456e2a53aaa7a825c9fed5400955b74aa4604fdd046069\": container with ID starting with e13671d2407e5e463e456e2a53aaa7a825c9fed5400955b74aa4604fdd046069 not found: ID does not exist"
Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.638424 4956 scope.go:117] "RemoveContainer" containerID="f8e4a16483aa419359f133c133164bdb941a1fda72810e4e278ddaf3b895aaf8"
Dec 11 22:33:47 crc kubenswrapper[4956]: E1211 22:33:47.638831 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8e4a16483aa419359f133c133164bdb941a1fda72810e4e278ddaf3b895aaf8\": container with ID starting with f8e4a16483aa419359f133c133164bdb941a1fda72810e4e278ddaf3b895aaf8 not found: ID does not exist" containerID="f8e4a16483aa419359f133c133164bdb941a1fda72810e4e278ddaf3b895aaf8"
Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.638884 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8e4a16483aa419359f133c133164bdb941a1fda72810e4e278ddaf3b895aaf8"} err="failed to get container status \"f8e4a16483aa419359f133c133164bdb941a1fda72810e4e278ddaf3b895aaf8\": rpc error: code = NotFound desc = could not find container \"f8e4a16483aa419359f133c133164bdb941a1fda72810e4e278ddaf3b895aaf8\": container with ID starting with f8e4a16483aa419359f133c133164bdb941a1fda72810e4e278ddaf3b895aaf8 not found: ID does not exist"
Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.638919 4956 scope.go:117] "RemoveContainer" containerID="5ba1cd26e812365e825f8011c1e703889031993c4c32156ec0fca4515dbdbe36"
Dec 11 22:33:47 crc kubenswrapper[4956]: E1211 22:33:47.639317 4956 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ba1cd26e812365e825f8011c1e703889031993c4c32156ec0fca4515dbdbe36\": container with ID starting with 5ba1cd26e812365e825f8011c1e703889031993c4c32156ec0fca4515dbdbe36 not found: ID does not exist" containerID="5ba1cd26e812365e825f8011c1e703889031993c4c32156ec0fca4515dbdbe36"
Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.639344 4956 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ba1cd26e812365e825f8011c1e703889031993c4c32156ec0fca4515dbdbe36"} err="failed to get container status \"5ba1cd26e812365e825f8011c1e703889031993c4c32156ec0fca4515dbdbe36\": rpc error: code = NotFound desc = could not find container \"5ba1cd26e812365e825f8011c1e703889031993c4c32156ec0fca4515dbdbe36\": container with ID starting with 5ba1cd26e812365e825f8011c1e703889031993c4c32156ec0fca4515dbdbe36 not found: ID does not exist"
Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.676354 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndmtn\" (UniqueName: \"kubernetes.io/projected/b1168193-2146-4e68-afe1-3edded018ce7-kube-api-access-ndmtn\") on node \"crc\" DevicePath \"\""
Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.676388 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1168193-2146-4e68-afe1-3edded018ce7-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.676400 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1168193-2146-4e68-afe1-3edded018ce7-utilities\") on node \"crc\" DevicePath \"\""
Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.844115 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dkcf7"]
Dec 11 22:33:47 crc kubenswrapper[4956]: I1211 22:33:47.853488 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dkcf7"]
Dec 11 22:33:48 crc kubenswrapper[4956]: I1211 22:33:48.036948 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1168193-2146-4e68-afe1-3edded018ce7" path="/var/lib/kubelet/pods/b1168193-2146-4e68-afe1-3edded018ce7/volumes"
Dec 11 22:34:01 crc kubenswrapper[4956]: I1211 22:34:01.021858 4956 scope.go:117] "RemoveContainer" containerID="5f9568ec17531092b6f7cdf7f4f7b946873e9651dc5cec94fe247690286da648"
Dec 11 22:34:01 crc kubenswrapper[4956]: I1211 22:34:01.688022 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" event={"ID":"cf61c63b-b06c-4f51-add2-aefe57de751a","Type":"ContainerStarted","Data":"4c8dbf4328e8490d7d4cb82864879a50ea4264e5dec2d3f4dd3efe8bb46ab7d0"}
event={"ID":"cf61c63b-b06c-4f51-add2-aefe57de751a","Type":"ContainerStarted","Data":"4c8dbf4328e8490d7d4cb82864879a50ea4264e5dec2d3f4dd3efe8bb46ab7d0"} Dec 11 22:35:28 crc kubenswrapper[4956]: I1211 22:35:28.543189 4956 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wr5bp"] Dec 11 22:35:28 crc kubenswrapper[4956]: E1211 22:35:28.545078 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1168193-2146-4e68-afe1-3edded018ce7" containerName="extract-utilities" Dec 11 22:35:28 crc kubenswrapper[4956]: I1211 22:35:28.545181 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1168193-2146-4e68-afe1-3edded018ce7" containerName="extract-utilities" Dec 11 22:35:28 crc kubenswrapper[4956]: E1211 22:35:28.545260 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1168193-2146-4e68-afe1-3edded018ce7" containerName="extract-content" Dec 11 22:35:28 crc kubenswrapper[4956]: I1211 22:35:28.545315 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1168193-2146-4e68-afe1-3edded018ce7" containerName="extract-content" Dec 11 22:35:28 crc kubenswrapper[4956]: E1211 22:35:28.545382 4956 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1168193-2146-4e68-afe1-3edded018ce7" containerName="registry-server" Dec 11 22:35:28 crc kubenswrapper[4956]: I1211 22:35:28.545434 4956 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1168193-2146-4e68-afe1-3edded018ce7" containerName="registry-server" Dec 11 22:35:28 crc kubenswrapper[4956]: I1211 22:35:28.545631 4956 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1168193-2146-4e68-afe1-3edded018ce7" containerName="registry-server" Dec 11 22:35:28 crc kubenswrapper[4956]: I1211 22:35:28.546735 4956 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wr5bp" Dec 11 22:35:28 crc kubenswrapper[4956]: I1211 22:35:28.564249 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wr5bp"] Dec 11 22:35:28 crc kubenswrapper[4956]: I1211 22:35:28.583582 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88d841a9-6338-4154-9e6d-7695b9336158-catalog-content\") pod \"certified-operators-wr5bp\" (UID: \"88d841a9-6338-4154-9e6d-7695b9336158\") " pod="openshift-marketplace/certified-operators-wr5bp" Dec 11 22:35:28 crc kubenswrapper[4956]: I1211 22:35:28.584219 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxx2j\" (UniqueName: \"kubernetes.io/projected/88d841a9-6338-4154-9e6d-7695b9336158-kube-api-access-jxx2j\") pod \"certified-operators-wr5bp\" (UID: \"88d841a9-6338-4154-9e6d-7695b9336158\") " pod="openshift-marketplace/certified-operators-wr5bp" Dec 11 22:35:28 crc kubenswrapper[4956]: I1211 22:35:28.584273 4956 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88d841a9-6338-4154-9e6d-7695b9336158-utilities\") pod \"certified-operators-wr5bp\" (UID: \"88d841a9-6338-4154-9e6d-7695b9336158\") " pod="openshift-marketplace/certified-operators-wr5bp" Dec 11 22:35:28 crc kubenswrapper[4956]: I1211 22:35:28.684862 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88d841a9-6338-4154-9e6d-7695b9336158-catalog-content\") pod \"certified-operators-wr5bp\" (UID: \"88d841a9-6338-4154-9e6d-7695b9336158\") " pod="openshift-marketplace/certified-operators-wr5bp" Dec 11 22:35:28 crc kubenswrapper[4956]: I1211 22:35:28.684906 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxx2j\" (UniqueName: \"kubernetes.io/projected/88d841a9-6338-4154-9e6d-7695b9336158-kube-api-access-jxx2j\") pod \"certified-operators-wr5bp\" (UID: \"88d841a9-6338-4154-9e6d-7695b9336158\") " pod="openshift-marketplace/certified-operators-wr5bp" Dec 11 22:35:28 crc kubenswrapper[4956]: I1211 22:35:28.684929 4956 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88d841a9-6338-4154-9e6d-7695b9336158-utilities\") pod \"certified-operators-wr5bp\" (UID: \"88d841a9-6338-4154-9e6d-7695b9336158\") " pod="openshift-marketplace/certified-operators-wr5bp" Dec 11 22:35:28 crc kubenswrapper[4956]: I1211 22:35:28.685548 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88d841a9-6338-4154-9e6d-7695b9336158-utilities\") pod \"certified-operators-wr5bp\" (UID: \"88d841a9-6338-4154-9e6d-7695b9336158\") " pod="openshift-marketplace/certified-operators-wr5bp" Dec 11 22:35:28 crc kubenswrapper[4956]: I1211 22:35:28.687113 4956 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88d841a9-6338-4154-9e6d-7695b9336158-catalog-content\") pod \"certified-operators-wr5bp\" (UID: \"88d841a9-6338-4154-9e6d-7695b9336158\") " pod="openshift-marketplace/certified-operators-wr5bp" Dec 11 22:35:28 crc kubenswrapper[4956]: I1211 22:35:28.703224 4956 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jxx2j\" (UniqueName: \"kubernetes.io/projected/88d841a9-6338-4154-9e6d-7695b9336158-kube-api-access-jxx2j\") pod \"certified-operators-wr5bp\" (UID: \"88d841a9-6338-4154-9e6d-7695b9336158\") " pod="openshift-marketplace/certified-operators-wr5bp" Dec 11 22:35:28 crc kubenswrapper[4956]: I1211 22:35:28.886427 4956 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wr5bp" Dec 11 22:35:29 crc kubenswrapper[4956]: I1211 22:35:29.361806 4956 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wr5bp"] Dec 11 22:35:29 crc kubenswrapper[4956]: I1211 22:35:29.406474 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wr5bp" event={"ID":"88d841a9-6338-4154-9e6d-7695b9336158","Type":"ContainerStarted","Data":"f96584c01cb480dded626f30d59d22f2d031148d1e11158624a47d70518db140"} Dec 11 22:35:30 crc kubenswrapper[4956]: I1211 22:35:30.415013 4956 generic.go:334] "Generic (PLEG): container finished" podID="88d841a9-6338-4154-9e6d-7695b9336158" containerID="cae2efbee97e4cfd2976fd40f8597d16d78533005417ab637b4ad614801430ee" exitCode=0 Dec 11 22:35:30 crc kubenswrapper[4956]: I1211 22:35:30.415088 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wr5bp" event={"ID":"88d841a9-6338-4154-9e6d-7695b9336158","Type":"ContainerDied","Data":"cae2efbee97e4cfd2976fd40f8597d16d78533005417ab637b4ad614801430ee"} Dec 11 22:35:30 crc kubenswrapper[4956]: I1211 22:35:30.418246 4956 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 22:35:31 crc kubenswrapper[4956]: I1211 22:35:31.424607 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wr5bp" event={"ID":"88d841a9-6338-4154-9e6d-7695b9336158","Type":"ContainerStarted","Data":"0449c8329f7c285696f254dcd93f1aee9bfe016bbc731691cdf71197e4b029b1"} Dec 11 22:35:32 crc kubenswrapper[4956]: I1211 22:35:32.447007 4956 generic.go:334] "Generic (PLEG): container finished" podID="88d841a9-6338-4154-9e6d-7695b9336158" containerID="0449c8329f7c285696f254dcd93f1aee9bfe016bbc731691cdf71197e4b029b1" exitCode=0 Dec 11 22:35:32 crc kubenswrapper[4956]: I1211 22:35:32.447062 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wr5bp" event={"ID":"88d841a9-6338-4154-9e6d-7695b9336158","Type":"ContainerDied","Data":"0449c8329f7c285696f254dcd93f1aee9bfe016bbc731691cdf71197e4b029b1"} Dec 11 22:35:33 crc kubenswrapper[4956]: I1211 22:35:33.454982 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wr5bp" event={"ID":"88d841a9-6338-4154-9e6d-7695b9336158","Type":"ContainerStarted","Data":"999bd3f153a0a5b5618129e8d3950dc8d563ad25894bfa22bd02bfb4e4791534"} Dec 11 22:35:33 crc kubenswrapper[4956]: I1211 22:35:33.473091 4956 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wr5bp" podStartSLOduration=2.953566043 podStartE2EDuration="5.47307167s" podCreationTimestamp="2025-12-11 22:35:28 +0000 UTC" firstStartedPulling="2025-12-11 22:35:30.418013485 +0000 UTC m=+2822.862391625" lastFinishedPulling="2025-12-11 22:35:32.937519102 +0000 UTC m=+2825.381897252" observedRunningTime="2025-12-11 22:35:33.470845029 +0000 UTC m=+2825.915223189" watchObservedRunningTime="2025-12-11 
Dec 11 22:35:38 crc kubenswrapper[4956]: I1211 22:35:38.887103 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wr5bp"
Dec 11 22:35:38 crc kubenswrapper[4956]: I1211 22:35:38.887668 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wr5bp"
Dec 11 22:35:38 crc kubenswrapper[4956]: I1211 22:35:38.935872 4956 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wr5bp"
Dec 11 22:35:39 crc kubenswrapper[4956]: I1211 22:35:39.548620 4956 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wr5bp"
Dec 11 22:35:42 crc kubenswrapper[4956]: I1211 22:35:42.930114 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wr5bp"]
Dec 11 22:35:42 crc kubenswrapper[4956]: I1211 22:35:42.930660 4956 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wr5bp" podUID="88d841a9-6338-4154-9e6d-7695b9336158" containerName="registry-server" containerID="cri-o://999bd3f153a0a5b5618129e8d3950dc8d563ad25894bfa22bd02bfb4e4791534" gracePeriod=2
Dec 11 22:35:45 crc kubenswrapper[4956]: I1211 22:35:45.552572 4956 generic.go:334] "Generic (PLEG): container finished" podID="88d841a9-6338-4154-9e6d-7695b9336158" containerID="999bd3f153a0a5b5618129e8d3950dc8d563ad25894bfa22bd02bfb4e4791534" exitCode=0
Dec 11 22:35:45 crc kubenswrapper[4956]: I1211 22:35:45.552641 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wr5bp" event={"ID":"88d841a9-6338-4154-9e6d-7695b9336158","Type":"ContainerDied","Data":"999bd3f153a0a5b5618129e8d3950dc8d563ad25894bfa22bd02bfb4e4791534"}
Dec 11 22:35:46 crc kubenswrapper[4956]: I1211 22:35:46.244650 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wr5bp"
Dec 11 22:35:46 crc kubenswrapper[4956]: I1211 22:35:46.398564 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxx2j\" (UniqueName: \"kubernetes.io/projected/88d841a9-6338-4154-9e6d-7695b9336158-kube-api-access-jxx2j\") pod \"88d841a9-6338-4154-9e6d-7695b9336158\" (UID: \"88d841a9-6338-4154-9e6d-7695b9336158\") "
Dec 11 22:35:46 crc kubenswrapper[4956]: I1211 22:35:46.398652 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88d841a9-6338-4154-9e6d-7695b9336158-utilities\") pod \"88d841a9-6338-4154-9e6d-7695b9336158\" (UID: \"88d841a9-6338-4154-9e6d-7695b9336158\") "
Dec 11 22:35:46 crc kubenswrapper[4956]: I1211 22:35:46.398733 4956 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88d841a9-6338-4154-9e6d-7695b9336158-catalog-content\") pod \"88d841a9-6338-4154-9e6d-7695b9336158\" (UID: \"88d841a9-6338-4154-9e6d-7695b9336158\") "
Dec 11 22:35:46 crc kubenswrapper[4956]: I1211 22:35:46.399792 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88d841a9-6338-4154-9e6d-7695b9336158-utilities" (OuterVolumeSpecName: "utilities") pod "88d841a9-6338-4154-9e6d-7695b9336158" (UID: "88d841a9-6338-4154-9e6d-7695b9336158"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 11 22:35:46 crc kubenswrapper[4956]: I1211 22:35:46.405158 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88d841a9-6338-4154-9e6d-7695b9336158-kube-api-access-jxx2j" (OuterVolumeSpecName: "kube-api-access-jxx2j") pod "88d841a9-6338-4154-9e6d-7695b9336158" (UID: "88d841a9-6338-4154-9e6d-7695b9336158"). InnerVolumeSpecName "kube-api-access-jxx2j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 11 22:35:46 crc kubenswrapper[4956]: I1211 22:35:46.451050 4956 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88d841a9-6338-4154-9e6d-7695b9336158-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "88d841a9-6338-4154-9e6d-7695b9336158" (UID: "88d841a9-6338-4154-9e6d-7695b9336158"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 11 22:35:46 crc kubenswrapper[4956]: I1211 22:35:46.500053 4956 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88d841a9-6338-4154-9e6d-7695b9336158-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 11 22:35:46 crc kubenswrapper[4956]: I1211 22:35:46.500333 4956 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxx2j\" (UniqueName: \"kubernetes.io/projected/88d841a9-6338-4154-9e6d-7695b9336158-kube-api-access-jxx2j\") on node \"crc\" DevicePath \"\""
Dec 11 22:35:46 crc kubenswrapper[4956]: I1211 22:35:46.500407 4956 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88d841a9-6338-4154-9e6d-7695b9336158-utilities\") on node \"crc\" DevicePath \"\""
Dec 11 22:35:46 crc kubenswrapper[4956]: I1211 22:35:46.573920 4956 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wr5bp" event={"ID":"88d841a9-6338-4154-9e6d-7695b9336158","Type":"ContainerDied","Data":"f96584c01cb480dded626f30d59d22f2d031148d1e11158624a47d70518db140"}
Dec 11 22:35:46 crc kubenswrapper[4956]: I1211 22:35:46.574000 4956 scope.go:117] "RemoveContainer" containerID="999bd3f153a0a5b5618129e8d3950dc8d563ad25894bfa22bd02bfb4e4791534"
Dec 11 22:35:46 crc kubenswrapper[4956]: I1211 22:35:46.574011 4956 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wr5bp"
Dec 11 22:35:46 crc kubenswrapper[4956]: I1211 22:35:46.593934 4956 scope.go:117] "RemoveContainer" containerID="0449c8329f7c285696f254dcd93f1aee9bfe016bbc731691cdf71197e4b029b1"
Dec 11 22:35:46 crc kubenswrapper[4956]: I1211 22:35:46.617597 4956 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wr5bp"]
Dec 11 22:35:46 crc kubenswrapper[4956]: I1211 22:35:46.627038 4956 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wr5bp"]
Dec 11 22:35:46 crc kubenswrapper[4956]: I1211 22:35:46.636026 4956 scope.go:117] "RemoveContainer" containerID="cae2efbee97e4cfd2976fd40f8597d16d78533005417ab637b4ad614801430ee"
Dec 11 22:35:48 crc kubenswrapper[4956]: I1211 22:35:48.031818 4956 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88d841a9-6338-4154-9e6d-7695b9336158" path="/var/lib/kubelet/pods/88d841a9-6338-4154-9e6d-7695b9336158/volumes"
Dec 11 22:36:16 crc kubenswrapper[4956]: I1211 22:36:16.888932 4956 patch_prober.go:28] interesting pod/machine-config-daemon-h6mx2 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 11 22:36:16 crc kubenswrapper[4956]: I1211 22:36:16.889854 4956 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-h6mx2" podUID="cf61c63b-b06c-4f51-add2-aefe57de751a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
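The closing liveness failures above are plain HTTP probes against the machine-config-daemon's health endpoint; the daemon is down again, so the dial is refused. A rough sketch of the probe semantics (port and path taken from the log; the kubelet counts roughly 2xx/3xx responses as success, and any transport error as failure):

package main

import (
	"fmt"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get("http://127.0.0.1:8798/health")
	if err != nil {
		// e.g. "dial tcp 127.0.0.1:8798: connect: connection refused",
		// exactly the output recorded by patch_prober above.
		fmt.Println("Liveness probe status=failure:", err)
		return
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		fmt.Println("Liveness probe status=failure: HTTP", resp.StatusCode)
		return
	}
	fmt.Println("Liveness probe status=success")
}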